Sync eng/common directory with azure-sdk-tools repository (#12665)
Showing 3 changed files with 357 additions and 0 deletions.
@@ -0,0 +1,81 @@
parameters:
  SourceDirectory: ''

steps:
- task: PythonScript@0
  displayName: MashUp Generated Index Site so it's served from the default site location
  inputs:
    scriptSource: inline
    script: |
      import argparse
      import os
      import logging
      import re
      import shutil
      from io import open

      SITE_INDEX = r'${{ parameters.SourceDirectory }}\docfx_project\_site'
      TOC_HTML_REGEX = r"\.\./toc.html"
      NAV_TOC_HTML_REGEX = r"api/"
      PREV_DIR_REGEX = r"\.\./"

      def locate_htmlfiles(directory):
          html_set = []
          for root, dirs, files in os.walk(directory):
              for file in files:
                  html_set.append(os.path.join(root, file))
          return html_set

      def process_html(content):
          content = re.sub(TOC_HTML_REGEX, 'navtoc.html', content)
          content = re.sub(PREV_DIR_REGEX, '', content)
          return content

      def process_navtoc(content):
          content = re.sub(NAV_TOC_HTML_REGEX, '', content)
          return content

      if __name__ == "__main__":
          html_files = locate_htmlfiles(os.path.join(SITE_INDEX, 'api'))
          navtoc_location = os.path.join(SITE_INDEX, 'toc.html')

          # Process the main toc.html and rename it to navtoc.html
          try:
              logging.info(
                  "Process {}.".format(navtoc_location)
              )
              with open(navtoc_location, "r", encoding="utf8") as navtoc_stream:
                  navtoc_content = navtoc_stream.read()
              new_navtoc_content = process_navtoc(navtoc_content)
              logging.info("Process {}.".format(navtoc_content))
              with open(navtoc_location, "w", encoding="utf8") as html_stream:
                  html_stream.write(new_navtoc_content)
          except Exception as e:
              logging.error(e)
              exit(1)

          # Rename main toc.html to navtoc.html
          os.rename(navtoc_location, os.path.join(SITE_INDEX, 'navtoc.html'))

          # Process all html in api directory
          for html_location in html_files:
              try:
                  logging.info(
                      "Process {}.".format(html_location)
                  )
                  with open(html_location, "r", encoding="utf8") as html_stream:
                      html_content = html_stream.read()
                  new_content = process_html(html_content)
                  logging.info("Process {}.".format(html_location))
                  with open(html_location, "w", encoding="utf8") as html_stream:
                      html_stream.write(new_content)
              except Exception as e:
                  logging.error(e)
                  exit(1)

          # Move all files from api to main site home directory
          for html_location in html_files:
              shutil.copy(html_location, SITE_INDEX)

          # Delete API Directory
          shutil.rmtree(os.path.join(SITE_INDEX, 'api'))
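For reference, a minimal standalone sketch of the rewriting the inline script above performs. The regexes are copied from the template; the HTML fragments are made up purely for illustration. Pages under `api/` get their `../toc.html` references retargeted to the mashed-up `navtoc.html` and lose their `../` prefixes (since they are copied into the site root), while the nav TOC itself drops its `api/` prefixes.

```python
import re

TOC_HTML_REGEX = r"\.\./toc.html"
NAV_TOC_HTML_REGEX = r"api/"
PREV_DIR_REGEX = r"\.\./"

def process_html(content):
    # Point pages at the relocated nav TOC, then strip "../" since the
    # files will live in the site root after the copy step.
    content = re.sub(TOC_HTML_REGEX, 'navtoc.html', content)
    content = re.sub(PREV_DIR_REGEX, '', content)
    return content

def process_navtoc(content):
    # The nav TOC is served next to the API pages after the mash-up,
    # so its "api/" prefixes are no longer needed.
    return re.sub(NAV_TOC_HTML_REGEX, '', content)

# Hypothetical fragments, for illustration only.
api_page = '<a href="../toc.html">Home</a> <img src="../styles/logo.png">'
nav_toc = '<a href="api/azure.core.html">azure.core</a>'

print(process_html(api_page))   # <a href="navtoc.html">Home</a> <img src="styles/logo.png">
print(process_navtoc(nav_toc))  # <a href="azure.core.html">azure.core</a>
```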
eng/common/pipelines/templates/steps/replace-relative-links.yml (217 additions, 0 deletions)
@@ -0,0 +1,217 @@
parameters:
  TargetFolder: ''
  RootFolder: ''
  BuildSHA: ''
  RepoId: ''

steps:
- task: PythonScript@0
  displayName: Replace Relative Readme Links with Absolute References
  inputs:
    scriptSource: inline
    script: |
      import argparse
      import sys
      import os
      import logging
      import glob
      import re
      import fnmatch
      from io import open
      try:
          from pathlib import Path
      except:
          from pathlib2 import Path

      # This script is intended to be run against a single folder. All readme.md files (regardless of casing)
      # will have their relative links updated with appropriate full reference links. This is a recursive update.
      logging.getLogger().setLevel(logging.INFO)

      RELATIVE_LINK_REPLACEMENT_SYNTAX = (
          "https://github.com/{repo_id}/tree/{build_sha}/{target_resource_path}"
      )

      LINK_DISCOVERY_REGEX = r"\[([^\]]*)\]\(([^)]+)\)"
      PREDEFINED_LINK_DISCOVERY_REGEX = r"(\[[^\]]+]\:)\s*([^\s]+)"

      IMAGE_FILE_EXTENSIONS = ['.jpeg', '.jpg', '.png', '.gif', '.tiff']
      RELATIVE_LINK_REPLACEMENT_SYNTAX_FOR_IMAGE = (
          "https://github.com/{repo_id}/raw/{build_sha}/{target_resource_path}"
      )
      def locate_readmes(directory):
          readme_set = []
          for root, dirs, files in os.walk(directory):
              for file in files:
                  if file.lower() == "readme.md":
                      readme_set.append(os.path.join(root, file))
          return readme_set

      def is_relative_link(link_value, readme_location):
          link_without_location = link_value
          if link_without_location.find('#') > 0:
              link_without_location = link_without_location[0:link_without_location.find('#')]

          try:
              return os.path.exists(
                  os.path.abspath(os.path.join(os.path.dirname(readme_location), link_without_location))
              )
          except:
              return False

      def replace_relative_link(match, readme_location, root_folder, build_sha, repo_id):
          link_path = match.group(2).strip()

          if is_relative_link(link_path, readme_location):
              # if it is a relative reference, we need to find the path from the root of the repository
              resource_absolute_path = os.path.abspath(
                  os.path.join(os.path.dirname(readme_location), link_path)
              )
              placement_from_root = os.path.relpath(resource_absolute_path, root_folder)
              suffix = Path(placement_from_root).suffix

              if (suffix in IMAGE_FILE_EXTENSIONS):
                  updated_link = RELATIVE_LINK_REPLACEMENT_SYNTAX_FOR_IMAGE.format(
                      repo_id=repo_id,
                      build_sha=build_sha,
                      target_resource_path=placement_from_root,
                  ).replace("\\", "/")
              else:
                  updated_link = RELATIVE_LINK_REPLACEMENT_SYNTAX.format(
                      repo_id=repo_id,
                      build_sha=build_sha,
                      target_resource_path=placement_from_root,
                  ).replace("\\", "/")

              return "[{}]({})".format(match.group(1), updated_link)
          else:
              return match.group(0)

      def replace_prefined_relative_links(match, readme_location, root_folder, build_sha, repo_id):
          link_path = match.group(2).strip()

          if is_relative_link(link_path, readme_location):
              # if it is a relative reference, we need to find the path from the root of the repository
              resource_absolute_path = os.path.abspath(
                  os.path.join(os.path.dirname(readme_location), link_path)
              )
              placement_from_root = os.path.relpath(resource_absolute_path, root_folder)
              suffix = Path(placement_from_root).suffix

              if (suffix in IMAGE_FILE_EXTENSIONS):
                  updated_link = RELATIVE_LINK_REPLACEMENT_SYNTAX_FOR_IMAGE.format(
                      repo_id=repo_id,
                      build_sha=build_sha,
                      target_resource_path=placement_from_root,
                  ).replace("\\", "/")
              else:
                  updated_link = RELATIVE_LINK_REPLACEMENT_SYNTAX.format(
                      repo_id=repo_id,
                      build_sha=build_sha,
                      target_resource_path=placement_from_root,
                  ).replace("\\", "/")

              return "{} {}".format(match.group(1), updated_link)
          else:
              return match.group(0)

      def transfer_content_to_absolute_references(
          root_folder, build_sha, repo_id, readme_location, content
      ):
          content = re.sub(
              LINK_DISCOVERY_REGEX,
              lambda match, readme_location=readme_location, root_folder=root_folder, build_sha=build_sha, repo_id=repo_id: replace_relative_link(
                  match, readme_location, root_folder, build_sha, repo_id
              ),
              content,
          )
          content = re.sub(
              PREDEFINED_LINK_DISCOVERY_REGEX,
              lambda match, readme_location=readme_location, root_folder=root_folder, build_sha=build_sha, repo_id=repo_id: replace_prefined_relative_links(
                  match, readme_location, root_folder, build_sha, repo_id
              ),
              content,
          )
          return content
      if __name__ == "__main__":
          parser = argparse.ArgumentParser(
              description="Replaces relative links for any README.md under the target folder. Given any discovered relative link, will replace with the provided repoId and SHA. Case insensitive"
          )

          parser.add_argument(
              "-t",
              "--target",
              dest="target_folder",
              help="The target folder that contains a README",
              default="${{ parameters.TargetFolder }}",
          )

          parser.add_argument(
              "-i",
              "--repoid",
              dest="repo_id",
              help='The target repository used as the base for the path replacement. Full Id, example: "Azure/azure-sdk-for-net"',
              default="${{ parameters.RepoId }}",
          )

          parser.add_argument(
              "-r",
              "--root",
              dest="root_folder",
              help="The root directory of the repository. This gives us the ability to rationalize links in situations where a relative link traverses UPWARDS from the readme.",
              default="${{ parameters.RootFolder }}",
          )

          parser.add_argument(
              "-s",
              "--sha",
              dest="build_sha",
              help="The commit hash associated with this change. Using this will mean that links will never be broken.",
              default="${{ parameters.BuildSHA }}",
          )

          args = parser.parse_args()

          logging.info("Root Folder: {}".format(args.root_folder))
          logging.info("Target Folder: {}".format(args.target_folder))
          logging.info("Repository Id: {}".format(args.repo_id))
          logging.info("Build SHA: {}".format(args.build_sha))

          readme_files = locate_readmes(args.target_folder)

          for readme_location in readme_files:
              try:
                  logging.info(
                      "Running Relative Link Replacement on {}.".format(readme_location)
                  )

                  with open(readme_location, "r", encoding="utf-8") as readme_stream:
                      readme_content = readme_stream.read()

                  new_content = transfer_content_to_absolute_references(
                      args.root_folder,
                      args.build_sha,
                      args.repo_id,
                      readme_location,
                      readme_content,
                  )

                  with open(readme_location, "w", encoding="utf-8") as readme_stream:
                      readme_stream.write(new_content)
              except Exception as e:
                  logging.error(e)
                  exit(1)
- script: |
    git diff -U0
  displayName: Highlight Readme Updates
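To illustrate what the inline script does to a README, here is a minimal standalone sketch of the first regex pass. The repo id, SHA, and README text below are made up, and the on-disk existence check from `is_relative_link` is omitted: each `[text](relative/path)` link is rewritten into a commit-pinned `github.com/.../tree/<sha>/...` URL, with image extensions routed to the `/raw/` form by the full template.

```python
import re

# Same discovery pattern and replacement syntax as the inline script above.
LINK_DISCOVERY_REGEX = r"\[([^\]]*)\]\(([^)]+)\)"
REPLACEMENT = "https://github.com/{repo_id}/tree/{build_sha}/{target_resource_path}"

# Hypothetical inputs for illustration only.
repo_id = "Azure/azure-sdk-for-python"
build_sha = "0123456789abcdef0123456789abcdef01234567"
readme = "See the [samples](samples/README.md) and the [changelog](CHANGELOG.md)."

def absolutize(match):
    # The real template only rewrites links whose target exists on disk relative
    # to the README; that filesystem check is skipped in this sketch.
    target = match.group(2).strip()
    url = REPLACEMENT.format(
        repo_id=repo_id, build_sha=build_sha, target_resource_path=target
    ).replace("\\", "/")
    return "[{}]({})".format(match.group(1), url)

print(re.sub(LINK_DISCOVERY_REGEX, absolutize, readme))
# Both links now point at commit-pinned github.com URLs.
```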
eng/common/pipelines/templates/steps/verify-path-length.yml (59 additions, 0 deletions)
@@ -0,0 +1,59 @@
# Template for all Python Scripts in this repository
parameters:
  SourceDirectory: ''
  BasePathLength: 49

steps:
- task: PythonScript@0
  displayName: Analyze Path Lengths
  inputs:
    scriptSource: inline
    script: |
      # Verifies the length of the file path for all files in the SourceDirectory.
      # On Windows, file paths and directory paths must be less than 260 and 248 characters respectively.
      # Repo users get a limited number of characters for the repo clone path, as specified by the BasePathLength parameter.
      # This script makes sure that, after adding the BasePathLength, paths in the repo stay under 260 characters for files and 248 for directories.
      import os
      import sys

      source_directory = r'${{ parameters.SourceDirectory }}'
      longest_file_path = ''
      longest_file_path_length = 0
      longest_dir_path = ''
      longest_dir_path_length = 0
      break_switch = False
      long_file_paths = []
      long_dir_paths = []

      def pluralize(string, plural_string, count):
          return plural_string if count > 1 else string

      print('Analyzing length of paths...')
      for root, dirs, files in os.walk('{0}'.format(source_directory)):
          for file in files:
              file_path = os.path.relpath(os.path.join(root, file), source_directory)
              if ((len(file_path) + ${{ parameters.BasePathLength }}) > longest_file_path_length):
                  longest_file_path_length = len(file_path) + ${{ parameters.BasePathLength }}
                  longest_file_path = file_path
                  if (longest_file_path_length >= 260):
                      long_file_paths.append(longest_file_path)

          dir_path = os.path.relpath(root, source_directory)
          if ((len(dir_path) + ${{ parameters.BasePathLength }}) > longest_dir_path_length):
              longest_dir_path_length = len(dir_path) + ${{ parameters.BasePathLength }}
              longest_dir_path = dir_path
              if (longest_dir_path_length >= 248):
                  long_dir_paths.append(longest_dir_path)
      if (len(long_file_paths) > 0):
          print('With a base path length of {0} the following file path{1} exceed the allowed path length of 260 characters'.format(${{ parameters.BasePathLength }}, pluralize('', 's', len(long_file_paths))))
          print(*long_file_paths, sep = "\n")
          break_switch = True

      if (len(long_dir_paths) > 0):
          print('With a base path length of {0} the following directory path{1} exceed the allowed path length of 248 characters'.format(${{ parameters.BasePathLength }}, pluralize('', 's', len(long_dir_paths))))
          print(*long_dir_paths, sep = "\n")
          break_switch = True

      if break_switch == True:
          print("Some file paths are too long. Please reduce path lengths")
          exit(1)
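As a back-of-the-envelope check of the arithmetic this template applies, here is a standalone sketch. The recording path below is hypothetical; the 49, 260, and 248 values come from the template's default BasePathLength and the Windows limits it enforces. The repo-relative path length plus the assumed clone-prefix budget has to stay under those limits.

```python
import os

BASE_PATH_LENGTH = 49   # template's default budget for the agent's clone directory prefix
MAX_FILE_PATH = 260     # Windows file path limit checked by the template
MAX_DIR_PATH = 248      # Windows directory path limit checked by the template

# Hypothetical repo-relative file, for illustration only.
repo_relative_file = os.path.join(
    'sdk', 'storage', 'azure-storage-blob', 'tests', 'recordings',
    'test_blob_service_client_async.test_very_long_recording_name.yaml',
)

effective_file_length = BASE_PATH_LENGTH + len(repo_relative_file)
effective_dir_length = BASE_PATH_LENGTH + len(os.path.dirname(repo_relative_file))

print('file  : {} characters (limit {})'.format(effective_file_length, MAX_FILE_PATH))
print('folder: {} characters (limit {})'.format(effective_dir_length, MAX_DIR_PATH))

if effective_file_length >= MAX_FILE_PATH or effective_dir_length >= MAX_DIR_PATH:
    print('Some file paths are too long. Please reduce path lengths')
```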