diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/ExecuteNotebook.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/ExecuteNotebook.py deleted file mode 100644 index f9fdbb857..000000000 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/ExecuteNotebook.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python -# # Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import json -import sys -import nbformat -import os -import errno -from NotebookProcessors import RemoveNoExecuteCells, UpdateVariablesPreprocessor -from typing import Dict, Tuple -import papermill as pm -import shutil -import virtualenv -import uuid -from jupyter_client.kernelspecapp import KernelSpecManager - -# This script is used to execute a notebook and write out the output notebook. -# The replaces calling the nbconvert via command-line, which doesn't write the output notebook correctly when there are errors during execution. - -STAGING_FOLDER = "staging" -ENVIRONMENTS_PATH = "environments" -KERNELS_SPECS_PATH = "kernel_specs" - - -def create_and_install_kernel() -> Tuple[str, str]: - # Create environment - kernel_name = str(uuid.uuid4()) - env_name = f"{ENVIRONMENTS_PATH}/{kernel_name}" - # venv.create(env_name, system_site_packages=True, with_pip=True) - virtualenv.cli_run([env_name, "--system-site-packages"]) - - # Create kernel spec - kernel_spec = { - "argv": [ - f"{env_name}/bin/python", - "-m", - "ipykernel_launcher", - "-f", - "{connection_file}", - ], - "display_name": "Python 3", - "language": "python", - } - kernel_spec_folder = os.path.join(KERNELS_SPECS_PATH, kernel_name) - kernel_spec_file = os.path.join(kernel_spec_folder, "kernel.json") - - # Create kernel spec folder - if not os.path.exists(os.path.dirname(kernel_spec_file)): - try: - os.makedirs(os.path.dirname(kernel_spec_file)) - except OSError as exc: # Guard against race condition - if exc.errno != errno.EEXIST: - raise - - with open(kernel_spec_file, mode="w", encoding="utf-8") as f: - json.dump(kernel_spec, f) - - # Install kernel - kernel_spec_manager = KernelSpecManager() - kernel_spec_manager.install_kernel_spec( - source_dir=kernel_spec_folder, kernel_name=kernel_name - ) - - return kernel_name, env_name - - -def execute_notebook( - notebook_file_path: str, - output_file_folder: str, - replacement_map: Dict[str, str], - should_log_output: bool, - should_use_new_kernel: bool, -): - # Create staging directory if it doesn't exist - staging_file_path = f"{STAGING_FOLDER}/{notebook_file_path}" - if not os.path.exists(os.path.dirname(staging_file_path)): - try: - os.makedirs(os.path.dirname(staging_file_path)) - except OSError as exc: # Guard against race condition - if exc.errno != errno.EEXIST: - raise - - file_name = os.path.basename(os.path.normpath(notebook_file_path)) - - # Create environments folder - if not os.path.exists(ENVIRONMENTS_PATH): - try: - os.makedirs(ENVIRONMENTS_PATH) - except OSError as exc: # Guard against race condition - 
if exc.errno != errno.EEXIST: - raise - - # Create and install kernel - kernel_name = next( - iter(KernelSpecManager().find_kernel_specs().keys()), None - ) # Find first existing kernel and use as default - env_name = None - if should_use_new_kernel: - kernel_name, env_name = create_and_install_kernel() - - # Read notebook - with open(notebook_file_path) as f: - nb = nbformat.read(f, as_version=4) - - has_error = False - - # Execute notebook - try: - # Create preprocessors - remove_no_execute_cells_preprocessor = RemoveNoExecuteCells() - update_variables_preprocessor = UpdateVariablesPreprocessor( - replacement_map=replacement_map - ) - - # Use no-execute preprocessor - (nb, resources,) = remove_no_execute_cells_preprocessor.preprocess(nb) - - (nb, resources) = update_variables_preprocessor.preprocess(nb, resources) - - # print(f"Staging modified notebook to: {staging_file_path}") - with open(staging_file_path, mode="w", encoding="utf-8") as f: - nbformat.write(nb, f) - - # Execute notebook - pm.execute_notebook( - input_path=staging_file_path, - output_path=staging_file_path, - kernel_name=kernel_name, - progress_bar=should_log_output, - request_save_on_cell_execute=should_log_output, - log_output=should_log_output, - stdout_file=sys.stdout if should_log_output else None, - stderr_file=sys.stderr if should_log_output else None, - ) - except Exception: - # print(f"Error executing the notebook: {notebook_file_path}.\n\n") - has_error = True - - raise - - finally: - # Clear env - if env_name is not None: - shutil.rmtree(path=env_name) - - # Copy execute notebook - output_file_path = os.path.join( - output_file_folder, "failure" if has_error else "success", file_name - ) - - # Create directories if they don't exist - if not os.path.exists(os.path.dirname(output_file_path)): - try: - os.makedirs(os.path.dirname(output_file_path)) - except OSError as exc: # Guard against race condition - if exc.errno != errno.EEXIST: - raise - - # print(f"Writing output to: {output_file_path}") - shutil.move(staging_file_path, output_file_path) diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/cleanup-cloudbuild.yaml b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/cleanup-cloudbuild.yaml index 890f5c4e9..8f83c9644 100644 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/cleanup-cloudbuild.yaml +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/cleanup-cloudbuild.yaml @@ -5,4 +5,4 @@ steps: args: - -c - 'python3 -m pip install -U -r .cloud-build/cleanup/cleanup-requirements.txt && python3 .cloud-build/cleanup/cleanup.py' -timeout: 86400s \ No newline at end of file +timeout: 86400s diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/cleanup.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/cleanup.py index 35ec9dd20..e5d345074 100644 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/cleanup.py +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/cleanup.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# # Copyright 2021 Google LLC +# # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. + from typing import List from resource_cleanup_manager import ( ResourceCleanupManager, @@ -36,15 +37,13 @@ def run_cleanup_managers(managers: List[ResourceCleanupManager], is_dry_run: boo resource_name = manager.resource_name(resource) print(f"Will delete '{type_name}': {resource_name}") else: - manager.delete(resource) - + try: + manager.delete(resource) + except Exception as exception: + print(exception) -def set_dry_run(dry_run_status: bool): - if dry_run_status is True: - return True - print("Starting cleanup in dry run mode...") - return False +is_dry_run = False # List of all cleanup managers managers = [ @@ -53,4 +52,4 @@ def set_dry_run(dry_run_status: bool): ModelResourceCleanupManager(), ] -run_cleanup_managers(managers=managers, is_dry_run=set_dry_run(False)) +run_cleanup_managers(managers=managers, is_dry_run=is_dry_run) diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/resource_cleanup_manager.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/resource_cleanup_manager.py index 11f45dcc4..3f4c7f344 100644 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/resource_cleanup_manager.py +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/cleanup/resource_cleanup_manager.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# # Copyright 2021 Google LLC +# # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + import abc from google.cloud import aiplatform from typing import Any diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_changed_notebooks_cli.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_changed_notebooks_cli.py new file mode 100644 index 000000000..4092178ee --- /dev/null +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_changed_notebooks_cli.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+"""A CLI to process changed notebooks and execute them on Google Cloud Build"""
+
+import argparse
+import pathlib
+import execute_changed_notebooks_helper
+
+
+def str2bool(v):
+    if isinstance(v, bool):
+        return v
+    if v.lower() in ("yes", "true", "t", "y", "1"):
+        return True
+    elif v.lower() in ("no", "false", "f", "n", "0"):
+        return False
+    else:
+        raise argparse.ArgumentTypeError("Boolean value expected.")
+
+
+parser = argparse.ArgumentParser(description="Run changed notebooks.")
+parser.add_argument(
+    "--test_paths_file",
+    type=pathlib.Path,
+    help="The path to the file that has newline-delimited folders of notebooks that should be tested.",
+    required=True,
+)
+parser.add_argument(
+    "--base_branch",
+    help="The base git branch to diff against to find changed files.",
+    required=False,
+)
+parser.add_argument(
+    "--container_uri",
+    type=str,
+    help="The container URI to run each notebook in.",
+    required=True,
+)
+parser.add_argument(
+    "--variable_project_id",
+    type=str,
+    help="The GCP project ID. This is used to inject a variable value into the notebook before running.",
+    required=True,
+)
+parser.add_argument(
+    "--variable_region",
+    type=str,
+    help="The GCP region. This is used to inject a variable value into the notebook before running.",
+    required=True,
+)
+parser.add_argument(
+    "--staging_bucket",
+    type=str,
+    help="The GCS bucket for staging temporary files.",
+    required=True,
+)
+parser.add_argument(
+    "--artifacts_bucket",
+    type=str,
+    help="The GCS bucket for storing executed notebooks.",
+    required=True,
+)
+parser.add_argument(
+    "--should_parallelize",
+    type=str2bool,
+    nargs="?",
+    const=True,
+    default=True,
+    help="Should run notebooks in parallel.",
+)
+
+args = parser.parse_args()
+
+notebooks = execute_changed_notebooks_helper.get_changed_notebooks(
+    test_paths_file=args.test_paths_file, base_branch=args.base_branch,
+)
+
+execute_changed_notebooks_helper.process_and_execute_notebooks(
+    notebooks=notebooks,
+    container_uri=args.container_uri,
+    staging_bucket=args.staging_bucket,
+    artifacts_bucket=args.artifacts_bucket,
+    variable_project_id=args.variable_project_id,
+    variable_region=args.variable_region,
+    should_parallelize=args.should_parallelize,
+)
diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/ExecuteChangedNotebooks.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_changed_notebooks_helper.py
similarity index 53%
rename from synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/ExecuteChangedNotebooks.py
rename to synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_changed_notebooks_helper.py
index 53974f26a..228801744 100644
--- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/ExecuteChangedNotebooks.py
+++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_changed_notebooks_helper.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# # Copyright 2021 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,30 +12,23 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
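+
+# Helpers shared by execute_changed_notebooks_cli.py: find notebooks changed
+# relative to a base branch, preprocess them, and run each one on Cloud Build.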
-import argparse + +import concurrent import dataclasses import datetime import functools -import pathlib import os +import pathlib +import nbformat +import re import subprocess -from pathlib import Path from typing import List, Optional -import concurrent from tabulate import tabulate +import operator -import ExecuteNotebook - - -def str2bool(v): - if isinstance(v, bool): - return v - if v.lower() in ("yes", "true", "t", "y", "1"): - return True - elif v.lower() in ("no", "false", "f", "n", "0"): - return False - else: - raise argparse.ArgumentTypeError("Boolean value expected.") +import execute_notebook_remote +from utils import util, NotebookProcessors +from google.cloud.devtools.cloudbuild_v1.types import BuildOperationMetadata def format_timedelta(delta: datetime.timedelta) -> str: @@ -61,48 +54,127 @@ def format_timedelta(delta: datetime.timedelta) -> str: @dataclasses.dataclass class NotebookExecutionResult: - notebook: str + name: str duration: datetime.timedelta is_pass: bool + log_url: str + output_uri: str + build_id: str error_message: Optional[str] -def execute_notebook( - artifacts_path: str, +def _process_notebook( + notebook_path: str, variable_project_id: str, variable_region: str, +): + # Read notebook + with open(notebook_path) as f: + nb = nbformat.read(f, as_version=4) + + # Create preprocessors + remove_no_execute_cells_preprocessor = NotebookProcessors.RemoveNoExecuteCells() + update_variables_preprocessor = NotebookProcessors.UpdateVariablesPreprocessor( + replacement_map={"PROJECT_ID": variable_project_id, "REGION": variable_region}, + ) + + # Use no-execute preprocessor + (nb, resources,) = remove_no_execute_cells_preprocessor.preprocess(nb) + + (nb, resources) = update_variables_preprocessor.preprocess(nb, resources) + + with open(notebook_path, mode="w", encoding="utf-8") as new_file: + nbformat.write(nb, new_file) + + +def _create_tag(filepath: str) -> str: + tag = os.path.basename(os.path.normpath(filepath)) + tag = re.sub("[^0-9a-zA-Z_.-]+", "-", tag) + + if tag.startswith(".") or tag.startswith("-"): + tag = tag[1:] + + return tag + + +def process_and_execute_notebook( + container_uri: str, + staging_bucket: str, + artifacts_bucket: str, variable_project_id: str, variable_region: str, - should_log_output: bool, - should_use_new_kernel: bool, notebook: str, + should_get_tail_logs: bool = False, ) -> NotebookExecutionResult: print(f"Running notebook: {notebook}") + # Create paths + notebook_output_uri = "/".join([artifacts_bucket, pathlib.Path(notebook).name]) + + # Create tag from notebook + tag = _create_tag(filepath=notebook) + result = NotebookExecutionResult( - notebook=notebook, + name=tag, duration=datetime.timedelta(seconds=0), is_pass=False, + output_uri=notebook_output_uri, + log_url="", + build_id="", error_message=None, ) + # TODO: Handle cases where multiple notebooks have the same name time_start = datetime.datetime.now() + operation = None try: - ExecuteNotebook.execute_notebook( - notebook_file_path=notebook, - output_file_folder=artifacts_path, - replacement_map={ - "PROJECT_ID": variable_project_id, - "REGION": variable_region, - }, - should_log_output=should_log_output, - should_use_new_kernel=should_use_new_kernel, + # Pre-process notebook by substituting variable names + _process_notebook( + notebook_path=notebook, + variable_project_id=variable_project_id, + variable_region=variable_region, + ) + + # Upload the pre-processed code to a GCS bucket + code_archive_uri = util.archive_code_and_upload(staging_bucket=staging_bucket) + + operation 
= execute_notebook_remote.execute_notebook_remote( + code_archive_uri=code_archive_uri, + notebook_uri=notebook, + notebook_output_uri=notebook_output_uri, + container_uri=container_uri, + tag=tag, ) + + operation_metadata = BuildOperationMetadata(mapping=operation.metadata) + result.build_id = operation_metadata.build.id + result.log_url = operation_metadata.build.log_url + + # Block and wait for the result + operation.result() + result.duration = datetime.datetime.now() - time_start result.is_pass = True print(f"{notebook} PASSED in {format_timedelta(result.duration)}.") except Exception as error: + result.error_message = str(error) + + if operation and should_get_tail_logs: + # Extract the logs + logs_bucket = operation_metadata.build.logs_bucket + + # Download tail end of logs file + log_file_uri = f"{logs_bucket}/log-{result.build_id}.txt" + + # Use gcloud to get tail + try: + result.error_message = subprocess.check_output( + ["gsutil", "cat", "-r", "-1000", log_file_uri], encoding="UTF-8" + ) + except Exception as error: + result.error_message = str(error) + result.duration = datetime.datetime.now() - time_start result.is_pass = False - result.error_message = str(error) + print( f"{notebook} FAILED in {format_timedelta(result.duration)}: {result.error_message}" ) @@ -110,39 +182,12 @@ def execute_notebook( return result -def run_changed_notebooks( - test_paths_file: str, - base_branch: Optional[str], - output_folder: str, - variable_project_id: str, - variable_region: str, - should_parallelize: bool, - should_use_separate_kernels: bool, -): +def get_changed_notebooks( + test_paths_file: str, base_branch: Optional[str] = None, +) -> List[str]: """ - Run the notebooks that exist under the folders defined in the test_paths_file. - It only runs notebooks that have differences from the Git base_branch. - The executed notebooks are saved in the output_folder. - Variables are also injected into the notebooks such as the variable_project_id and variable_region. - Args: - test_paths_file (str): - Required. The new-line delimited file to folders and files that need checking. - Folders are checked recursively. - base_branch (str): - Optional. If provided, only the files that have changed from the base_branch will be checked. - If not provided, all files will be checked. - output_folder (str): - Required. The folder to write executed notebooks to. - variable_project_id (str): - Required. The value for PROJECT_ID to inject into notebooks. - variable_region (str): - Required. The value for REGION to inject into notebooks. - should_parallelize (bool): - Required. Should run notebooks in parallel using a thread pool as opposed to in sequence. - should_use_separate_kernels (bool): - Note: Dependencies don't install correctly when this is set to True - See https://github.com/nteract/papermill/issues/625 - Required. Should run each notebook in a separate and independent virtual environment. + Get the notebooks that exist under the folders defined in the test_paths_file. + It only returns notebooks that have differences from the Git base_branch. 
""" test_paths = [] @@ -170,14 +215,43 @@ def run_changed_notebooks( notebooks = notebooks.decode("utf-8").split("\n") notebooks = [notebook for notebook in notebooks if notebook.endswith(".ipynb")] notebooks = [notebook for notebook in notebooks if len(notebook) > 0] - notebooks = [notebook for notebook in notebooks if Path(notebook).exists()] + notebooks = [notebook for notebook in notebooks if pathlib.Path(notebook).exists()] + + return notebooks - # Create paths - artifacts_path = Path(output_folder) - artifacts_path.mkdir(parents=True, exist_ok=True) - artifacts_path.joinpath("success").mkdir(parents=True, exist_ok=True) - artifacts_path.joinpath("failure").mkdir(parents=True, exist_ok=True) +def process_and_execute_notebooks( + notebooks: List[str], + container_uri: str, + staging_bucket: str, + artifacts_bucket: str, + variable_project_id: str, + variable_region: str, + should_parallelize: bool, +): + """ + Run the notebooks that exist under the folders defined in the test_paths_file. + It only runs notebooks that have differences from the Git base_branch. + The executed notebooks are saved in the artifacts_bucket. + Variables are also injected into the notebooks such as the variable_project_id and variable_region. + Args: + test_paths_file (str): + Required. The new-line delimited file to folders and files that need checking. + Folders are checked recursively. + base_branch (str): + Optional. If provided, only the files that have changed from the base_branch will be checked. + If not provided, all files will be checked. + staging_bucket (str): + Required. The GCS staging bucket to write source code to. + artifacts_bucket (str): + Required. The GCS staging bucket to write executed notebooks to. + variable_project_id (str): + Required. The value for PROJECT_ID to inject into notebooks. + variable_region (str): + Required. The value for REGION to inject into notebooks. + should_parallelize (bool): + Required. Should run notebooks in parallel using a thread pool as opposed to in sequence. 
+ """ notebook_execution_results: List[NotebookExecutionResult] = [] if len(notebooks) > 0: @@ -191,25 +265,25 @@ def run_changed_notebooks( notebook_execution_results = list( executor.map( functools.partial( - execute_notebook, - artifacts_path, + process_and_execute_notebook, + container_uri, + staging_bucket, + artifacts_bucket, variable_project_id, variable_region, - False, - should_use_separate_kernels, ), notebooks, ) ) else: notebook_execution_results = [ - execute_notebook( - artifacts_path=artifacts_path, + process_and_execute_notebook( + container_uri=container_uri, + staging_bucket=staging_bucket, + artifacts_bucket=artifacts_bucket, variable_project_id=variable_project_id, variable_region=variable_region, notebook=notebook, - should_log_output=True, - should_use_new_kernel=should_use_separate_kernels, ) for notebook in notebooks ] @@ -218,87 +292,36 @@ def run_changed_notebooks( print("\n=== RESULTS ===\n") - notebooks_sorted = sorted( + results_sorted = sorted( notebook_execution_results, key=lambda result: result.is_pass, reverse=True, ) + # Print results print( tabulate( [ [ - os.path.basename(os.path.normpath(result.notebook)), + result.name, "PASSED" if result.is_pass else "FAILED", format_timedelta(result.duration), - result.error_message or "--", + result.log_url, ] - for result in notebooks_sorted + for result in results_sorted ], - headers=["file", "status", "duration", "error"], + headers=["build_tag", "status", "duration", "log_url"], ) ) print("\n=== END RESULTS===\n") + total_notebook_duration = functools.reduce( + operator.add, + [datetime.timedelta(seconds=0)] + + [result.duration for result in results_sorted], + ) + + print(f"Cumulative notebook duration: {format_timedelta(total_notebook_duration)}") -parser = argparse.ArgumentParser(description="Run changed notebooks.") -parser.add_argument( - "--test_paths_file", - type=pathlib.Path, - help="The path to the file that has newline-limited folders of notebooks that should be tested.", - required=True, -) -parser.add_argument( - "--base_branch", - help="The base git branch to diff against to find changed files.", - required=False, -) -parser.add_argument( - "--output_folder", - type=pathlib.Path, - help="The path to the folder to store executed notebooks.", - required=True, -) -parser.add_argument( - "--variable_project_id", - type=str, - help="The GCP project id. This is used to inject a variable value into the notebook before running.", - required=True, -) -parser.add_argument( - "--variable_region", - type=str, - help="The GCP region. 
This is used to inject a variable value into the notebook before running.", - required=True, -) - -# Note: Dependencies don't install correctly when this is set to True -parser.add_argument( - "--should_parallelize", - type=str2bool, - nargs="?", - const=True, - default=False, - help="Should run notebooks in parallel.", -) - -# Note: This isn't guaranteed to work correctly due to existing Papermill issue -# See https://github.com/nteract/papermill/issues/625 -parser.add_argument( - "--should_use_separate_kernels", - type=str2bool, - nargs="?", - const=True, - default=False, - help="(Experimental) Should run each notebook in a separate and independent virtual environment.", -) - -args = parser.parse_args() -run_changed_notebooks( - test_paths_file=args.test_paths_file, - base_branch=args.base_branch, - output_folder=args.output_folder, - variable_project_id=args.variable_project_id, - variable_region=args.variable_region, - should_parallelize=args.should_parallelize, - should_use_separate_kernels=args.should_use_separate_kernels, -) + # Raise error if any notebooks failed + if not all([result.is_pass for result in results_sorted]): + raise RuntimeError("Notebook failures detected. See logs for details") diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_cli.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_cli.py new file mode 100644 index 000000000..9545f9c4c --- /dev/null +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_cli.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A CLI to download (optional) and run a single notebook locally""" + +import argparse +import execute_notebook_helper + +parser = argparse.ArgumentParser(description="Run a single notebook locally.") +parser.add_argument( + "--notebook_source", + type=str, + help="Local filepath or GCS URI to notebook.", + required=True, +) +parser.add_argument( + "--output_file_or_uri", + type=str, + help="Local file or GCS URI to save executed notebook to.", + required=True, +) + +args = parser.parse_args() +execute_notebook_helper.execute_notebook( + notebook_source=args.notebook_source, + output_file_or_uri=args.output_file_or_uri, + should_log_output=True, +) diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_helper.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_helper.py new file mode 100644 index 000000000..32c7de372 --- /dev/null +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_helper.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Methods to run a notebook locally""" + +import sys +import os +import errno +import papermill as pm +import shutil + +from utils import util +from google.cloud.aiplatform import utils + +# This script is used to execute a notebook and write out the output notebook. + + +def execute_notebook( + notebook_source: str, output_file_or_uri: str, should_log_output: bool, +): + """Execute a single notebook using Papermill""" + file_name = os.path.basename(os.path.normpath(notebook_source)) + + # Download notebook if it's a GCS URI + if notebook_source.startswith("gs://"): + # Extract uri components + bucket_name, prefix = utils.extract_bucket_and_prefix_from_gcs_path( + notebook_source + ) + + # Download remote notebook to local file system + notebook_source = file_name + util.download_file( + bucket_name=bucket_name, blob_name=prefix, destination_file=notebook_source + ) + + execution_exception = None + + # Execute notebook + try: + # Execute notebook + pm.execute_notebook( + input_path=notebook_source, + output_path=notebook_source, + progress_bar=should_log_output, + request_save_on_cell_execute=should_log_output, + log_output=should_log_output, + stdout_file=sys.stdout if should_log_output else None, + stderr_file=sys.stderr if should_log_output else None, + ) + except Exception as exception: + execution_exception = exception + finally: + # Copy executed notebook + if output_file_or_uri.startswith("gs://"): + # Upload to GCS path + util.upload_file(notebook_source, remote_file_path=output_file_or_uri) + + print("\n=== EXECUTION FINISHED ===\n") + print( + f"Please debug the executed notebook by downloading: {output_file_or_uri}" + ) + print("\n======\n") + else: + # Create directories if they don't exist + if not os.path.exists(os.path.dirname(output_file_or_uri)): + try: + os.makedirs(os.path.dirname(output_file_or_uri)) + except OSError as exc: # Guard against race condition + if exc.errno != errno.EEXIST: + raise + + print(f"Writing output to: {output_file_or_uri}") + shutil.move(notebook_source, output_file_or_uri) + + if execution_exception: + raise execution_exception diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_remote.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_remote.py new file mode 100644 index 000000000..fc4bc6411 --- /dev/null +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/execute_notebook_remote.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+"""Methods to run a notebook on Google Cloud Build"""
+
+from google.protobuf import duration_pb2
+from yaml.loader import FullLoader
+
+import google.auth
+from google.cloud.devtools import cloudbuild_v1
+from google.cloud.devtools.cloudbuild_v1.types import Source, StorageSource
+
+from typing import Optional
+import yaml
+
+from google.cloud.aiplatform import utils
+from google.api_core import operation
+
+CLOUD_BUILD_FILEPATH = ".cloud-build/notebook-execution-test-cloudbuild-single.yaml"
+TIMEOUT_IN_SECONDS = 86400
+
+
+def execute_notebook_remote(
+    code_archive_uri: str,
+    notebook_uri: str,
+    notebook_output_uri: str,
+    container_uri: str,
+    tag: Optional[str],
+) -> operation.Operation:
+    """Create and execute a single notebook on Google Cloud Build"""
+
+    # Authorize the client with Google defaults
+    credentials, project_id = google.auth.default()
+    client = cloudbuild_v1.services.cloud_build.CloudBuildClient()
+
+    build = cloudbuild_v1.Build()
+
+    # Load the build steps from the single-notebook Cloud Build config.
+    # For more information on build configuration, see
+    # https://cloud.google.com/build/docs/configuring-builds/create-basic-configuration
+    cloudbuild_config = yaml.load(open(CLOUD_BUILD_FILEPATH), Loader=FullLoader)
+
+    substitutions = {
+        "_PYTHON_IMAGE": container_uri,
+        "_NOTEBOOK_GCS_URI": notebook_uri,
+        "_NOTEBOOK_OUTPUT_GCS_URI": notebook_output_uri,
+    }
+
+    (
+        source_archived_file_gcs_bucket,
+        source_archived_file_gcs_object,
+    ) = utils.extract_bucket_and_prefix_from_gcs_path(code_archive_uri)
+
+    build.source = Source(
+        storage_source=StorageSource(
+            bucket=source_archived_file_gcs_bucket,
+            object_=source_archived_file_gcs_object,
+        )
+    )
+
+    build.steps = cloudbuild_config["steps"]
+    build.substitutions = substitutions
+    build.timeout = duration_pb2.Duration(seconds=TIMEOUT_IN_SECONDS)
+    build.queue_ttl = duration_pb2.Duration(seconds=TIMEOUT_IN_SECONDS)
+
+    # Start the build and return the in-progress operation; callers can read
+    # operation.metadata for the build id and log URL, and block on
+    # operation.result() to wait for completion.
+    if tag:
+        build.tags = [tag]
+
+    operation = client.create_build(project_id=project_id, build=build)
+
+    return operation
diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/notebook-execution-test-cloudbuild-single.yaml b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/notebook-execution-test-cloudbuild-single.yaml
new file mode 100644
index 000000000..5e12755b1
--- /dev/null
+++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/notebook-execution-test-cloudbuild-single.yaml
@@ -0,0 +1,31 @@
+steps:
+  # Show the gcloud info and check if gcloud exists
+  - name: ${_PYTHON_IMAGE}
+    entrypoint: /bin/sh
+    args:
+      - -c
+      - 'gcloud config list'
+  # Check the Python version
+  - name: ${_PYTHON_IMAGE}
+    entrypoint: /bin/sh
+    args:
+      - -c
+      - 'python3 .cloud-build/CheckPythonVersion.py'
+  # Install Python dependencies
+  - name: ${_PYTHON_IMAGE}
+    entrypoint: /bin/sh
+    args:
+      - -c
+      - 'python3 -m pip install -U pip && python3 -m pip install -U --user -r .cloud-build/requirements.txt'
+  # Install Python dependencies and run testing script
+  - name: ${_PYTHON_IMAGE}
+    entrypoint: /bin/sh
+    args:
+      - -c
+      - 'python3 -m pip install -U pip && python3 -m pip freeze && python3 .cloud-build/execute_notebook_cli.py --notebook_source "${_NOTEBOOK_GCS_URI}" --output_file_or_uri "${_NOTEBOOK_OUTPUT_GCS_URI}"'
+    
env: + - 'IS_TESTING=1' +timeout: 86400s +options: + pool: + name: ${_PRIVATE_POOL_NAME} \ No newline at end of file diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/notebook-execution-test-cloudbuild.yaml b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/notebook-execution-test-cloudbuild.yaml index 15d1d8283..4d9d84756 100644 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/notebook-execution-test-cloudbuild.yaml +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/notebook-execution-test-cloudbuild.yaml @@ -1,44 +1,41 @@ steps: # Show the gcloud info and check if gcloud exists - - name: 'gcr.io/cloud-devrel-public-resources/python-samples-testing-docker:latest' + - name: ${_PYTHON_IMAGE} entrypoint: /bin/sh args: - -c - 'gcloud config list' + # # Clone the Git repo + # - name: ${_PYTHON_IMAGE} + # entrypoint: git + # args: ['clone', "${_GIT_REPO}", "--branch", "${_GIT_BRANCH_NAME}", "."] # Check the Python version - - name: 'gcr.io/cloud-devrel-public-resources/python-samples-testing-docker:latest' + - name: ${_PYTHON_IMAGE} entrypoint: /bin/sh args: - -c - 'python3 .cloud-build/CheckPythonVersion.py' # Fetch base branch if required - - name: 'gcr.io/cloud-devrel-public-resources/python-samples-testing-docker:latest' + - name: ${_PYTHON_IMAGE} entrypoint: /bin/sh args: - -c - 'if [ -n "${_BASE_BRANCH}" ]; then git fetch origin "${_BASE_BRANCH}":refs/remotes/origin/"${_BASE_BRANCH}"; else echo "Skipping fetch."; fi' # Install Python dependencies - - name: 'gcr.io/cloud-devrel-public-resources/python-samples-testing-docker:latest' - entrypoint: pip - args: ['install', '--upgrade', '--user', '--requirement', '.cloud-build/requirements.txt'] + - name: ${_PYTHON_IMAGE} + entrypoint: /bin/sh + args: + - -c + - 'python3 -m pip install -U pip && python3 -m pip install -U --user -r .cloud-build/requirements.txt' # Install Python dependencies and run testing script - - name: 'gcr.io/cloud-devrel-public-resources/python-samples-testing-docker:latest' + - name: ${_PYTHON_IMAGE} entrypoint: /bin/sh args: - -c - - 'python3 -m pip freeze && python3 .cloud-build/ExecuteChangedNotebooks.py --test_paths_file "${_TEST_PATHS_FILE}" --base_branch "${_FORCED_BASE_BRANCH}" --output_folder ${BUILD_ID} --variable_project_id ${PROJECT_ID} --variable_region ${_GCP_REGION}' + - 'python3 -m pip install -U pip && python3 -m pip freeze && python3 .cloud-build/execute_changed_notebooks_cli.py --test_paths_file "${_TEST_PATHS_FILE}" --base_branch "${_FORCED_BASE_BRANCH}" --container_uri ${_PYTHON_IMAGE} --staging_bucket ${_GCS_STAGING_BUCKET} --artifacts_bucket ${_GCS_STAGING_BUCKET}/executed_notebooks/PR_${_PR_NUMBER}/BUILD_${BUILD_ID} --variable_project_id ${PROJECT_ID} --variable_region ${_GCP_REGION}' env: - 'IS_TESTING=1' - # Manually copy artifacts to GCS - - name: gcr.io/cloud-builders/gsutil - entrypoint: /bin/sh - args: - - -c - - 'if [ $(ls -pR "/workspace/${BUILD_ID}" | grep -v / | grep -v ^$ | wc -l) -ne 0 ]; then gsutil -m -q rsync -r "/workspace/${BUILD_ID}" "gs://${_GCS_ARTIFACTS_BUCKET}/test-artifacts/PR_${_PR_NUMBER}/BUILD_${BUILD_ID}/"; else echo "No artifacts to copy."; fi' - # Fail if there is anything in the failure folder - - name: 'gcr.io/cloud-devrel-public-resources/python-samples-testing-docker:latest' - entrypoint: /bin/sh - args: - - -c - - 'echo "Download executed notebooks with this command: \"mkdir -p artifacts && gsutil rsync -r 
gs://${_GCS_ARTIFACTS_BUCKET}/test-artifacts/PR_${_PR_NUMBER}/BUILD_${BUILD_ID} artifacts/\"" && if [ "$(ls -A /workspace/${BUILD_ID}/failure | wc -l)" -ne 0 ]; then exit 1; else exit 0; fi' -timeout: 86400s \ No newline at end of file +timeout: 86400s +options: + pool: + name: ${_PRIVATE_POOL_NAME} \ No newline at end of file diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/requirements.txt b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/requirements.txt index a31104871..c111762a1 100644 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/requirements.txt +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/requirements.txt @@ -5,4 +5,8 @@ papermill==2.3 numpy==1.22.3 pandas==1.4.1 matplotlib==3.4 -tabulate==0.8.9 \ No newline at end of file +tabulate==0.8.9 +google-cloud-aiplatform +google-cloud-storage +google-cloud-build +gcloud \ No newline at end of file diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/test_folders.txt b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/test_folders.txt new file mode 100644 index 000000000..6eb63c85f --- /dev/null +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/test_folders.txt @@ -0,0 +1 @@ +notebooks/official \ No newline at end of file diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/NotebookProcessors.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/NotebookProcessors.py similarity index 92% rename from synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/NotebookProcessors.py rename to synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/NotebookProcessors.py index 90a61a51c..1b7934ea6 100644 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/NotebookProcessors.py +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/NotebookProcessors.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# # Copyright 2021 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,9 +12,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + from nbconvert.preprocessors import Preprocessor from typing import Dict -import UpdateNotebookVariables +from . 
import UpdateNotebookVariables as update_notebook_variables
 
 
 class RemoveNoExecuteCells(Preprocessor):
@@ -40,7 +41,7 @@ def update_variables(content: str, replacement_map: Dict[str, str]):
     # VARIABLE_NAME = '[description]'
 
     for variable_name, variable_value in replacement_map.items():
-        content = UpdateNotebookVariables.get_updated_value(
+        content = update_notebook_variables.get_updated_value(
            content=content,
            variable_name=variable_name,
            variable_value=variable_value,
diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/UpdateNotebookVariables.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/UpdateNotebookVariables.py
similarity index 98%
rename from synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/UpdateNotebookVariables.py
rename to synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/UpdateNotebookVariables.py
index b357d7854..c602e4903 100644
--- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/UpdateNotebookVariables.py
+++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/UpdateNotebookVariables.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# # Copyright 2021 Google LLC
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 import re
 
 """
diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/__init__.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/util.py b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/util.py
new file mode 100644
index 000000000..c72aedd1d
--- /dev/null
+++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.cloud-build/utils/util.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
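+
+# Shared GCS helpers used by the execute_notebook_* scripts: copy files to and
+# from Cloud Storage via gsutil, and archive the repo source for Cloud Build.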
+ +import os + +import subprocess +import tarfile +import uuid + + +def download_file(bucket_name: str, blob_name: str, destination_file: str) -> str: + """Copies a remote GCS file to a local path""" + remote_file_path = "".join(["gs://", "/".join([bucket_name, blob_name])]) + + subprocess.check_output( + ["gsutil", "cp", remote_file_path, destination_file], encoding="UTF-8" + ) + + return destination_file + + +def upload_file(local_file_path: str, remote_file_path: str,) -> str: + """Copies a local file to a GCS path""" + subprocess.check_output( + ["gsutil", "cp", local_file_path, remote_file_path], encoding="UTF-8" + ) + + return remote_file_path + + +def archive_code_and_upload(staging_bucket: str): + # Archive all source in current directory + unique_id = uuid.uuid4() + source_archived_file = f"source_archived_{unique_id}.tar.gz" + + git_files = subprocess.check_output( + ["git", "ls-tree", "-r", "HEAD", "--name-only"], encoding="UTF-8" + ).split("\n") + + with tarfile.open(source_archived_file, "w:gz") as tar: + for file in git_files: + if len(file) > 0 and os.path.exists(file): + tar.add(file) + + # Upload archive to GCS bucket + source_archived_file_gcs = upload_file( + local_file_path=f"{source_archived_file}", + remote_file_path="/".join( + [staging_bucket, "code_archives", source_archived_file] + ), + ) + + print(f"Uploaded source code archive to {source_archived_file_gcs}") + + return source_archived_file_gcs diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.github/workflows/ci.yaml b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.github/workflows/ci.yaml index d8b78f268..7fab63af1 100644 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.github/workflows/ci.yaml +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.github/workflows/ci.yaml @@ -19,8 +19,10 @@ jobs: - name: Format and lint notebooks run: | set +e + .github/workflows/linter/run_linter.sh -t RTN=$? + if [ "$RTN" != "0" ]; then echo "There were problems formatting/linting the notebooks." echo "Please run the following commands locally from the root directory to attempt to autofix the issues:" diff --git a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.github/workflows/linter/run_linter.sh b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.github/workflows/linter/run_linter.sh index 7b7cadb22..102c33a51 100644 --- a/synthtool/gcp/templates/python_notebooks_testing_pipeline/.github/workflows/linter/run_linter.sh +++ b/synthtool/gcp/templates/python_notebooks_testing_pipeline/.github/workflows/linter/run_linter.sh @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This script automatically formats and lints all notebooks that have changed from the head of the master branch. +# This script automatically formats and lints all notebooks that have changed from the head of the main branch. # # Options: # -t: Test-mode. Only test if format and linting are required but make no changes to files. @@ -32,16 +32,16 @@ RTN="0" is_test=false -# Process all options supplied on the command line +# Process all options supplied on the command line while getopts 'tc' arg; do case $arg in - 't') - is_test=true - ;; - *) - echo "Unimplemented flag" - exit 1 - ;; + 't') + is_test=true + ;; + *) + echo "Unimplemented flag" + exit 1 + ;; esac done @@ -50,15 +50,13 @@ echo "Test mode: $is_test" # Only check notebooks in test folders modified in this pull request. 
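+# A plain pipeline would run the read loop in a subshell, so appends to the
+# notebooks array would be lost when the loop exits.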
# Note: Use process substitution to persist the data in the array
 notebooks=()
-while read -r file || [ -n "$line" ];
-do
+while read -r file || [ -n "$file" ]; do
     notebooks+=("$file")
 done < <(git diff --name-only main... | grep '\.ipynb$')
 
 problematic_notebooks=()
 if [ ${#notebooks[@]} -gt 0 ]; then
-    for notebook in "${notebooks[@]}"
-    do
+    for notebook in "${notebooks[@]}"; do
         if [ -f "$notebook" ]; then
             echo "Checking notebook: ${notebook}"
 
@@ -68,7 +66,7 @@ if [ ${#notebooks[@]} -gt 0 ]; then
             ISORT_RTN="0"
             FLAKE8_RTN="0"
 
-            if [ "$is_test" = true ] ; then
+            if [ "$is_test" = true ]; then
                 echo "Running nbfmt..."
                 python3 -m tensorflow_docs.tools.nbfmt --remove_outputs --test "$notebook"
                 NBFMT_RTN=$?
@@ -82,24 +80,24 @@
                 python3 -m nbqa isort "$notebook" --check
                 ISORT_RTN=$?
                 echo "Running flake8..."
-                python3 -m nbqa flake8 "$notebook" --show-source --extend-ignore=W391,E501,F821,E402,F404,W503,W291,E203,E999,E111,E113
+                python3 -m nbqa flake8 "$notebook" --show-source --extend-ignore=W391,E501,F821,E402,F404,W503,E203,E722,W293,W291
                 FLAKE8_RTN=$?
             else
                 echo "Running black..."
-                python3 -m black "$notebook"
-                BLACK_RTN=$?
+                python3 -m nbqa black "$notebook"
+                BLACK_RTN=$?
                 echo "Running pyupgrade..."
-                python3 -m nbqa pyupgrade "$notebook" --nbqa-mutate
+                python3 -m nbqa pyupgrade "$notebook"
                 PYUPGRADE_RTN=$?
                 echo "Running isort..."
-                python3 -m nbqa isort "$notebook" --nbqa-mutate
+                python3 -m nbqa isort "$notebook"
                 ISORT_RTN=$?
                 echo "Running nbfmt..."
                 python3 -m tensorflow_docs.tools.nbfmt --remove_outputs "$notebook"
                 NBFMT_RTN=$?
                 echo "Running flake8..."
-                python3 -m nbqa flake8 "$notebook" --show-source --extend-ignore=W391,E501,F821,E402,F404,W503,W291,E203,E999,E111,E113
-                FLAKE8_RTN=$?
+                python3 -m nbqa flake8 "$notebook" --show-source --extend-ignore=W391,E501,F821,E402,F404,W503,E203,E722,W293,W291
+                FLAKE8_RTN=$?
             fi
 
             NOTEBOOK_RTN="0"
@@ -108,7 +106,7 @@
                 NOTEBOOK_RTN="$NBFMT_RTN"
                 printf "nbfmt: Failed\n"
             fi
-            
+
             if [ "$BLACK_RTN" != "0" ]; then
                 NOTEBOOK_RTN="$BLACK_RTN"
                 printf "black: Failed\n"
@@ -131,10 +129,9 @@
             echo "Notebook lint finished with return code = $NOTEBOOK_RTN"
             echo ""
 
-            if [ "$NOTEBOOK_RTN" != "0" ]
-            then
-                problematic_notebooks+=("$notebook")
-                RTN=$NOTEBOOK_RTN
+            if [ "$NOTEBOOK_RTN" != "0" ]; then
+                problematic_notebooks+=("$notebook")
+                RTN=$NOTEBOOK_RTN
             fi
         fi
     done
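
Usage sketch: the single-notebook runner added above can be exercised locally from the .cloud-build directory (the bucket and notebook paths below are illustrative, not taken from this patch; gsutil must be on PATH since the helper shells out to it for GCS copies):

    # Mirrors the final build step in notebook-execution-test-cloudbuild-single.yaml,
    # which invokes execute_notebook_cli.py with the same two arguments.
    import execute_notebook_helper

    execute_notebook_helper.execute_notebook(
        notebook_source="gs://my-staging-bucket/notebooks/example.ipynb",  # hypothetical GCS URI
        output_file_or_uri="executed/example.ipynb",  # hypothetical local path
        should_log_output=True,
    )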