diff --git a/.editorconfig b/.editorconfig index 5cf5aed0..b362f49d 100644 --- a/.editorconfig +++ b/.editorconfig @@ -64,7 +64,7 @@ # SERIOUSLY. # # ------------------------------ -# Generated by edx-lint version: 5.2.5 +# Generated by edx-lint version: 5.3.4 # ------------------------------ [*] end_of_line = lf @@ -97,4 +97,4 @@ max_line_length = 72 [*.rst] max_line_length = 79 -# f2f02689fced7a2e0c62c2f9803184114dc2ae4b +# bbcbced841ed335dd8abb7456a6b13485d701b40 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6bffccd3..0b7d74b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,7 @@ name: Python CI on: push: - branches: [ master ] + branches: [master] pull_request: jobs: @@ -11,13 +11,14 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-20.04] - python-version: ['3.8'] + os: [ubuntu-latest] + python-version: + - '3.12' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml index 6e6ddb52..a8e070d3 100644 --- a/.github/workflows/pypi-publish.yml +++ b/.github/workflows/pypi-publish.yml @@ -8,13 +8,13 @@ on: jobs: push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.8 @@ -28,7 +28,7 @@ jobs: run: python setup.py sdist bdist_wheel - name: Publish to PyPi - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_UPLOAD_TOKEN }} diff --git a/.github/workflows/support-window-issue-creator.yml b/.github/workflows/support-window-issue-creator.yml index d81a3e28..fb7946c5 100644 --- a/.github/workflows/support-window-issue-creator.yml +++ b/.github/workflows/support-window-issue-creator.yml @@ -6,7 +6,7 @@ on: jobs: create_issue: name: Create Support Window Issue - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest permissions: issues: write steps: diff --git a/.github/workflows/upgrade-python-requirements.yml b/.github/workflows/upgrade-python-requirements.yml index aaf1ebcc..57779a50 100644 --- a/.github/workflows/upgrade-python-requirements.yml +++ b/.github/workflows/upgrade-python-requirements.yml @@ -13,9 +13,8 @@ jobs: call-upgrade-python-requirements-workflow: with: branch: ${{ github.event.inputs.branch }} - team_reviewers: "arbi-bom" - email_address: arbi-bom@edx.org send_success_notification: false + python_version: "3.12" secrets: requirements_bot_github_token: ${{ secrets.REQUIREMENTS_BOT_GITHUB_TOKEN }} requirements_bot_github_email: ${{ secrets.REQUIREMENTS_BOT_GITHUB_EMAIL }} diff --git a/README.rst b/README.rst index 25b1d0f8..11a45955 100644 --- a/README.rst +++ b/README.rst @@ -64,6 +64,13 @@ To work on these tools: there with third-party requirements intended just for your tool. This will automatically create an installable "extra" for your requirements. +Active Tools +============ + +repo_checks +----------- + +See the `repo_checks README `_ in its subfolder. Older Tools =========== diff --git a/barcalendar.py b/barcalendar.py index 4e07e63b..74c71640 100644 --- a/barcalendar.py +++ b/barcalendar.py @@ -4,9 +4,9 @@ 0. Update the data. Search for "Editable content" below. 1. Run this program. 
It prints JavaScript code. Copy it. 2. Open a Google Sheet, either the existing Support Windows spreadsheet - (https://docs.google.com/spreadsheets/d/11DheEtMDGrbA9hsUvZ2SEd4Cc8CaC4mAfoV8SVaLBGI) + (https://docs.google.com/spreadsheets/d/1wtpoypH1XOPc_G6h9AUNXJ6XiNKD6dlkMP3lubdpE9I) or a new spreadsheet. -3. If the current tab isn't empty, open a new tab (Add Sheet). +3. Select the "Support Windows" sheet or create a new sheet if it doesn't exist already. 4. Open the script editor (Extensions - Apps Script). 5. If there's any code there, delete it. 6. Paste the JavaScript code this program wrote. @@ -16,15 +16,94 @@ """ +import atexit import colorsys import datetime import itertools +import logging import re import time import requests import yaml + +def setup_custom_logging(): + """ + Create a custom logger to flush all Warning logs at the end of the script + to avoid breaking the console script being generated by the script + """ + class DelayedLogHandler(logging.Handler): + def __init__(self): + super().__init__() + self.log_records = [] + + def emit(self, record): + self.log_records.append(f"// {record}") + + def _flush_log_records(handler): + for record in handler.log_records: + print(record) + + # Create a custom log handler + custom_log_handler = DelayedLogHandler() + custom_log_handler.setLevel(logging.WARNING) + + # Create custom logger and attach custom_handler to it + eol_logger = logging.getLogger("eol_logger") + eol_logger.addHandler(custom_log_handler) + + # Register a function to flush log records at exit + atexit.register(_flush_log_records, custom_log_handler) + + # Return custom logger to use in the script + return eol_logger + +def validate_version_date(project, version, year, month, check_start=False): + """ + Validates the end-of-life(EOL) or release date for a project version. + Returns the correct EOL/releaseDate in case of a conflict with api value. + + Parameters: + project (str): The name of the project. + version (str): The version of the project. + year (int): The year of the EOL/release date. + month (int): The month of the EOL/release date. 
+ check_start (bool): Boolean flag to check either EOL or release date + + Returns: + tuple: A tuple representing the validated EOL/release date (year, month) + """ + try: + response = requests.get( + url=f"https://endoflife.date/api/{project}/{version}.json", + headers={'Accept': 'application/json'} + ) + + if response.status_code != 200: + eol_logger.error(f"Project not Found: {project}/{version} not found on endoflife.date") + return (year, month) + + version_info = response.json() + if check_start: + eol_year, eol_month, _ = version_info['releaseDate'].split("-") + date_type = "releaseDate" + else: + if version_info['eol'] == False: + eol_logger.warning(f"No EOL found for {project} {version}") + return (year, month) + eol_year, eol_month, _ = version_info['eol'].split("-") + date_type = "eol" + + if (year, month) == (int(eol_year), int(eol_month)): + return (year, month) + else: + eol_logger.warning(f"Invalid EOL: Update {project} {version} {date_type} to {eol_month}/{eol_year} instead") + return (int(eol_year), int(eol_month)) + except requests.exceptions.RequestException as e: + eol_logger.error(f"API request failed with an exception: {str(e)}") + return (year, month) + def css_to_rgb(hex): assert hex[0] == "#" return [int(h, 16)/255 for h in [hex[1:3], hex[3:5], hex[5:7]]] @@ -312,6 +391,7 @@ def parse_version_name(line): return version_name.capitalize() raise ValueError(f"Couldn't get version name from {line!r}") +eol_logger = setup_custom_logging() # ==== Editable content ==== # Global Options @@ -324,22 +404,22 @@ def parse_version_name(line): # The current versions of everything. Use the same strings as the keys in the various sections below. CURRENT = { "Open edX": parse_version_name(versions['OPENEDX_COMMON_VERSION']), - "Python": "3.8", - "Django": "3.2", + "Python": "3.11", + "Django": "4.2", "Ubuntu": "20.04", - "Node": "16.x", + "Node": "18.x", "Mongo": parse_version_number(versions['DOCKER_IMAGE_MONGODB']), "MySQL": parse_version_number(versions['DOCKER_IMAGE_MYSQL']), "Elasticsearch": parse_version_number(versions['DOCKER_IMAGE_ELASTICSEARCH']), "Redis": parse_version_number(versions['DOCKER_IMAGE_REDIS']), - "Ruby": "3.0", + "Ruby": "3.3", } EDX = { - "Python": "3.8", - "Django": "3.2", + "Python": "3.11", + "Django": "4.2", "Ubuntu": "20.04", - "Node": "16.x", + "Node": "18.x", "Mongo": "4.2", "MySQL": "5.7", "Elasticsearch": "7.10", @@ -369,9 +449,15 @@ def parse_version_name(line): ('Juniper', 2020, 6), ("Koa", 2020, 12), ("Lilac", 2021, 6), + ("Maple", 2021, 12), + ("Nutmeg", 2022, 6), + ("Olive", 2022, 12), + ("Palm", 2023, 6), + ("Quince", 2023, 12), + ("Redwood", 2024, 6), ] # https://www.treenames.net/common_tree_names.html -future = ["Maple", "Nutmeg", "Olive", "Palm", "Quince", "Redwood"] + list("STUVWXYZ") +future = ["Sumac", "Teak"] + list("UVWXYZ") target_length = 6 # months per release releases = list(itertools.chain(names, [(name, None, None) for name in future])) @@ -411,15 +497,20 @@ def parse_version_name(line): # ('1.11', 2017, 4, True), # ('2.0', 2018, 1, False), # ('2.1', 2018, 8, False), - ('2.2', 2019, 4, True), - ('3.0', 2020, 1, False), - ('3.1', 2020, 8, False), + # ('2.2', 2019, 4, True), + # ('3.0', 2020, 1, False), + # ('3.1', 2020, 8, False), ('3.2', 2021, 4, True), - ('4.0', 2022, 1, False), + ('4.0', 2021, 12, False), ('4.1', 2022, 8, False), ('4.2', 2023, 4, True, "Django 4.2 work is being tracked in https://github.com/openedx/platform-roadmap/issues/269"), + ('5.0', 2023, 12, False), + ('5.1', 2024, 8, False), + ('5.2', 2025, 4, True), 
+ ] for name, year, month, lts, *more in django_releases: + year, month = validate_version_date("Django", name, year, month, check_start=True) if LTS_ONLY and not lts: continue length = 3*12 if lts else 16 @@ -449,6 +540,7 @@ def parse_version_name(line): ('3.12', 2023, 10, 2028, 10), # https://peps.python.org/pep-0693/ ] for name, syear, smonth, eyear, emonth in python_releases: + eyear, emonth = validate_version_date("Python", name, eyear, emonth) cal.bar( f"Python {name}", start=(syear, smonth), @@ -465,6 +557,7 @@ def parse_version_name(line): '18.04': 'Bionic Beaver', '20.04': 'Focal Fossa', '22.04': 'Jammy Jellyfish', + '24.04': 'Noble Numbat', } for year, month in itertools.product(range(START_YEAR % 100, END_YEAR % 100), [4, 10]): @@ -477,9 +570,10 @@ def parse_version_name(line): nick = ubuntu_nicks.get(name, '') if nick: nick = f" {nick}" + year, month = validate_version_date("Ubuntu", name, 2000+year, month, check_start=True) cal.bar( f"Ubuntu {name}{nick}", - (2000+year, month), + (year, month), length=length, color=color, text_color="white", @@ -495,11 +589,14 @@ def parse_version_name(line): #('8.x', 2017, 5, 2019, 12), # ('10.x', 2018, 4, 2021, 4), # ('12.x', 2019, 4, 2022, 4), - ('14.x', 2020, 4, 2023, 4), - ('16.x', 2021, 4, 2023, 9), # https://nodejs.org/en/blog/announcements/nodejs16-eol/ - ('18.x', 2022, 4, 2025, 4), + ('14', 2020, 4, 2023, 4), + ('16', 2021, 4, 2023, 9), # https://nodejs.org/en/blog/announcements/nodejs16-eol/ + ('18', 2022, 4, 2025, 4), + ('20', 2023, 4, 2026, 4), + ('22', 2024, 4, 2027, 4), ] for name, syear, smonth, eyear, emonth in node_releases: + eyear, emonth = validate_version_date("NodeJS", name, eyear, emonth) cal.bar( f"Node {name}", start=(syear, smonth), @@ -521,8 +618,11 @@ def parse_version_name(line): ('4.0', 2018, 6, 2022, 4), ('4.2', 2019, 8, 2023, 4), ('4.4', 2020, 7, 2024, 2), + ('5.0', 2021, 7, 2024, 10), + ('7.0', 2023, 8, 2026, 8), ] for name, syear, smonth, eyear, emonth in mongo_releases: + eyear, emonth = validate_version_date("mongo", name, eyear, emonth) cal.bar( f"Mongo {name}", start=(syear, smonth), @@ -539,9 +639,12 @@ def parse_version_name(line): ('5.6', 2013, 2, 2021, 2), ('5.7', 2015, 10, 2023, 10), ('8.0', 2018, 4, 2026, 4), - ('8.1', 2023, 6, 2024, 9), # Not sure of the real support dates. 
+ ('8.1', 2023, 6, 2023, 10), + ('8.4', 2024, 4, 2032, 4), + ('9.0', 2024, 7, 2025, 7), ] for name, syear, smonth, eyear, emonth in mysql_releases: + eyear, emonth = validate_version_date("MySQL", name, eyear, emonth) cal.bar( f"MySQL {name}", start=(syear, smonth), @@ -565,9 +668,11 @@ def parse_version_name(line): ('7.11', 2021, 2, 2022, 8), ('7.12', 2021, 3, 2022, 9), ('7.13', 2021, 5, 2022, 11), - ('7.17', 2022, 2, 2023, 8), + ('7.17', 2022, 2, 2025, 1), + ('8.15', 2022, 2, 2026, 2), ] for name, syear, smonth, eyear, emonth in es_releases: + eyear, emonth = validate_version_date("ElasticSearch", name, eyear, emonth) cal.bar( f"Elasticsearch {name}", start=(syear, smonth), @@ -582,12 +687,14 @@ def parse_version_name(line): cal.section_note("https://docs.redis.com/latest/rs/administering/product-lifecycle/#endoflife-schedule") # https://endoflife.date/redis redis_releases = [ - ('5.6', 2020, 4, 2021, 10), - ('6.0', 2020, 5, 2022, 5), - ('6.2', 2021, 8, 2024, 4), - ('7.0', 2022, 4, 2025, 4), + ('6.0', 2020, 5, 2023, 8), + ('6.2', 2021, 8, 2024, 8), + ('7.0', 2022, 4, 2024, 7), + ('7.2', 2023, 8, 2024, 8), + ('7.4', 2024, 7, 2025, 7), ] for name, syear, smonth, eyear, emonth in redis_releases: + eyear, emonth = validate_version_date("Redis", name, eyear, emonth) cal.bar( f"Redis {name}", start=(syear, smonth), @@ -604,13 +711,16 @@ def parse_version_name(line): ruby_releases = [ #('2.3', 2015, 12, 2019, 3), #('2.4', 2016, 12, 2020, 3), - ('2.5', 2017, 12, 2021, 3), - ('2.6', 2018, 12, 2022, 3), + #('2.5', 2017, 12, 2021, 3), + #('2.6', 2018, 12, 2022, 3), ('2.7', 2019, 12, 2023, 3), ('3.0', 2020, 12, 2024, 3), ('3.1', 2021, 12, 2025, 3), + ('3.2', 2022, 12, 2026, 3), + ('3.3', 2023, 12, 2027, 3), ] for name, syear, smonth, eyear, emonth, *more in ruby_releases: + eyear, emonth = validate_version_date("Ruby", name, eyear, emonth) cal.bar( f"Ruby {name}", start=(syear, smonth), diff --git a/catalog-info.yaml b/catalog-info.yaml new file mode 100644 index 00000000..5442ce33 --- /dev/null +++ b/catalog-info.yaml @@ -0,0 +1,32 @@ +# This file records information about this repo. Its use is described in OEP-55: +# https://open-edx-proposals.readthedocs.io/en/latest/processes/oep-0055-proc-project-maintainers.html + +apiVersion: backstage.io/v1alpha1 +# (Required) Acceptable Values: Component, Resource, System +# Use `Component` unless you know how backstage works and what the other kinds mean. +kind: Component +metadata: + # (Required) Must be the name of the repo, without the owning organization. + name: 'repo-tools' + description: "Tools for repo maintenance, etc." + links: + + annotations: + # (Optional) We use the below annotation to indicate whether or not this + # repository should be tagged for openedx releases and which branch is tagged. + openedx.org/release: null +spec: + + # (Required) This can be a group (`group:`) or a user (`user:`). + # Don't forget the "user:" or "group:" prefix. 
Groups must be GitHub team
+  # names in the openedx GitHub organization: https://github.com/orgs/openedx/teams
+  #
+  # If you need a new team created, create an issue with Axim engineering:
+  # https://github.com/openedx/axim-engineering/issues/new/choose
+  owner: group:axim-engineering
+
+  # (Required) Acceptable Type Values: service, website, library
+  type: 'library'
+
+  # (Required) Acceptable Lifecycle Values: experimental, production, deprecated
+  lifecycle: 'production'
diff --git a/edx_repo_tools/__init__.py b/edx_repo_tools/__init__.py
index 03ab978d..2e79725c 100644
--- a/edx_repo_tools/__init__.py
+++ b/edx_repo_tools/__init__.py
@@ -1,2 +1,2 @@
-__version__ = '0.8.2'
+__version__ = '2.0.0'
diff --git a/edx_repo_tools/audit_gh_users/README.rst b/edx_repo_tools/audit_gh_users/README.rst
new file mode 100644
index 00000000..879550f0
--- /dev/null
+++ b/edx_repo_tools/audit_gh_users/README.rst
@@ -0,0 +1,45 @@
+Audit GitHub Users
+##################
+
+This script compares the list of users in a GitHub org against a list of
+users in a CSV and tells you which GitHub users are not listed in the CSV.
+
+CSV Location and Format
+***********************
+
+The CSV is expected to be in a GitHub repo and it should contain a column
+named "GitHub Username" that contains a GitHub username.
+
+Usage
+*****
+
+You will need a GitHub personal access token with the following scopes:
+
+* read:org
+* repo
+
+First, set up repo-tools as described in `the root README <../../README.rst>`_.
+There are a few ways to do this; one way is::
+
+    export GITHUB_TOKEN="$(pass github-token)"  # assumes you have passwordstore.org
+
+    python3 -m venv venv
+    . venv/bin/activate
+    pip install -e .[audit_gh_users]
+
+Then, run the script::
+
+    audit_users
+
+Contributing
+************
+
+* Make changes on your branch.
+
+* CI will run tests for you, but not linting, so ensure your changes don't break pylint: ``pylint edx_repo_tools/audit_gh_users``.
+
+* Ping `#ask-axim`__ on Slack for review.
+
+__ https://openedx.slack.com/archives/C0497NQCLBT
+
+* Once approved, apply and merge (non-Axim engineers: ask your Axim reviewer to do this part for you).
diff --git a/edx_repo_tools/audit_gh_users/__init__.py b/edx_repo_tools/audit_gh_users/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/edx_repo_tools/audit_gh_users/audit_users.py b/edx_repo_tools/audit_gh_users/audit_users.py
new file mode 100644
index 00000000..86b4ac64
--- /dev/null
+++ b/edx_repo_tools/audit_gh_users/audit_users.py
@@ -0,0 +1,72 @@
+"""
+Audit GitHub users in an org, comparing the list of users to those in a CSV.
+
+See the README for more info.
+"""
+
+import base64
+import csv
+import io
+from itertools import chain
+
+import click
+from ghapi.all import GhApi, paged
+
+
+@click.command()
+@click.option(
+    "--github-token",
+    "_github_token",
+    envvar="GITHUB_TOKEN",
+    required=True,
+    help="A github personal access token.",
+)
+@click.option(
+    "--org",
+    "org",
+    default="openedx",
+    help="The github org that you wish to check.",
+)
+@click.option(
+    "--csv-repo",
+    "csv_repo",
+    default="openedx-webhooks-data",
+    help="The github repo that contains the CSV we should compare against.",
+)
+@click.option(
+    "--csv-path",
+    "csv_path",
+    default="salesforce-export.csv",
+    help="The path in the repo to the csv file. The file should contain a 'GitHub Username' column.",
+)
+def main(org, _github_token, csv_repo, csv_path):
+    """
+    Entry point for command-line invocation.
+    """
+    api = GhApi()
+
+    # Get all github users in the org.
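+    # `paged` yields one page of org members per iteration;
+    # chain.from_iterable flattens the pages into a single stream of members.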
+ current_org_users = [ + member.login + for member in chain.from_iterable( + paged(api.orgs.list_members, org, per_page=100) + ) + ] + + # Get all github usernames from openedx-webhooks-data/salesforce-export.csv + csv_file = io.StringIO( + base64.decodebytes( + api.repos.get_content(org, csv_repo, csv_path).content.encode() + ).decode("utf-8") + ) + reader = csv.DictReader(csv_file) + csv_github_users = [row["GitHub Username"] for row in reader] + + # Find all the people that are in the org but not in sales force. + extra_org_users = set(current_org_users) - set(csv_github_users) + + # List the users we need to investigate + print("\n".join(sorted(extra_org_users))) + + +if __name__ == "__main__": + main() # pylint: disable=no-value-for-parameter diff --git a/edx_repo_tools/audit_gh_users/extra.in b/edx_repo_tools/audit_gh_users/extra.in new file mode 100644 index 00000000..d10f28bc --- /dev/null +++ b/edx_repo_tools/audit_gh_users/extra.in @@ -0,0 +1,4 @@ +-c ../../requirements/constraints.txt + +click +ghapi diff --git a/edx_repo_tools/audit_gh_users/extra.txt b/edx_repo_tools/audit_gh_users/extra.txt new file mode 100644 index 00000000..8bf29925 --- /dev/null +++ b/edx_repo_tools/audit_gh_users/extra.txt @@ -0,0 +1,19 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# make upgrade +# +click==8.1.7 + # via -r edx_repo_tools/audit_gh_users/extra.in +fastcore==1.5.54 + # via ghapi +ghapi==1.0.5 + # via -r edx_repo_tools/audit_gh_users/extra.in +packaging==24.1 + # via + # fastcore + # ghapi + +# The following packages are considered to be unsafe in a requirements file: +# pip diff --git a/edx_repo_tools/codemods/node16/gha_release_workflow_modernizer.py b/edx_repo_tools/codemods/node16/gha_release_workflow_modernizer.py index 8d134f38..283e1d59 100644 --- a/edx_repo_tools/codemods/node16/gha_release_workflow_modernizer.py +++ b/edx_repo_tools/codemods/node16/gha_release_workflow_modernizer.py @@ -7,7 +7,7 @@ from pathlib import Path import click -from ruamel.yaml.main import round_trip_load as yaml_load +from ruamel.yaml import YAML from edx_repo_tools.utils import YamlLoader @@ -26,7 +26,8 @@ def _does_nvmrc_exists(self): def _add_setup_nodejs_env_step(self, step_elements, step_index): if self._does_nvmrc_exists(): - fetch_node_version_step = yaml_load(FETCH_NODE_VERSION_STEP) + yaml = YAML() + fetch_node_version_step = yaml.load(FETCH_NODE_VERSION_STEP) step_elements.insert( step_index, fetch_node_version_step) return step_elements diff --git a/edx_repo_tools/codemods/python312/__init__.py b/edx_repo_tools/codemods/python312/__init__.py new file mode 100644 index 00000000..e73ee6dd --- /dev/null +++ b/edx_repo_tools/codemods/python312/__init__.py @@ -0,0 +1,2 @@ +from .tox_modernizer import ConfigReader, ToxModernizer +from .gh_actions_modernizer import GithubCIModernizer diff --git a/edx_repo_tools/codemods/python312/gh_actions_modernizer.py b/edx_repo_tools/codemods/python312/gh_actions_modernizer.py new file mode 100644 index 00000000..66b8aa93 --- /dev/null +++ b/edx_repo_tools/codemods/python312/gh_actions_modernizer.py @@ -0,0 +1,90 @@ +""" +Github Actions CI Modernizer to add Python 3.12 and drop Django 3.2 testing +""" +import os +from copy import deepcopy +import click +from edx_repo_tools.utils import YamlLoader + +TO_BE_REMOVED_PYTHON = ['3.5', '3.6', '3.7'] +ALLOWED_PYTHON_VERSIONS = ['3.8', '3.12'] + +ALLOWED_DJANGO_VERSIONS = ['4.2', 'django42'] +DJANGO_ENV_TO_ADD = ['django42'] +DJANGO_ENV_TO_REMOVE = ['django32', 
'django40', 'django41'] + + +class GithubCIModernizer(YamlLoader): + def __init__(self, file_path): + super().__init__(file_path) + + def _update_python_and_django_in_matrix(self): + django_versions = list() + python_versions = list() + matrix_elements = dict() + + + for section_key in self.elements['jobs']: + matrix_elements = deepcopy(self.elements['jobs'][section_key]['strategy']['matrix']) + + for key, value in matrix_elements.items(): + if key == 'django-version': + for dj_version in DJANGO_ENV_TO_ADD: + if dj_version not in value: + value.append(dj_version) + django_versions = list(filter(lambda version: version in ALLOWED_DJANGO_VERSIONS, value)) + if django_versions: + self.elements['jobs'][section_key]['strategy']['matrix'][key] = django_versions + + if key in ['tox', 'toxenv', 'tox-env']: + for dj_env in DJANGO_ENV_TO_ADD: + if dj_env not in value: + value.append(dj_env) + tox_envs = list(filter(lambda version: version not in DJANGO_ENV_TO_REMOVE, value)) + if tox_envs: + self.elements['jobs'][section_key]['strategy']['matrix'][key] = tox_envs + + if key == 'python-version': + for version in ALLOWED_PYTHON_VERSIONS: + if version not in value: + value.append(version) + python_versions = list(filter(lambda version: version not in TO_BE_REMOVED_PYTHON, value)) + if python_versions: + self.elements['jobs'][section_key]['strategy']['matrix'][key] = python_versions + else: + del self.elements['jobs'][section_key]['strategy']['matrix'][key] + + elif key in ['include', 'exclude']: + allowed_python_vers = list() + for item in value: + if item['python-version'] not in TO_BE_REMOVED_PYTHON: + allowed_python_vers.append(item) + + if len(allowed_python_vers): + self.elements['jobs'][section_key]['strategy']['matrix'][key] = allowed_python_vers + else: + del self.elements['jobs'][section_key]['strategy']['matrix'][key] + + + def _update_github_actions(self): + self._update_python_and_django_in_matrix() + + def modernize(self): + self._update_github_actions() + self.update_yml_file() + + +@click.command() +@click.option( + '--path', default='.github/workflows/ci.yml', + help="Path to default CI workflow file") +def main(path): + if os.path.exists(path): + modernizer = GithubCIModernizer(path) + modernizer.modernize() + else: + print("ci.yml not found on specified path") + + +if __name__ == '__main__': + main() diff --git a/edx_repo_tools/codemods/python312/tox_modernizer.py b/edx_repo_tools/codemods/python312/tox_modernizer.py new file mode 100644 index 00000000..41bf0c43 --- /dev/null +++ b/edx_repo_tools/codemods/python312/tox_modernizer.py @@ -0,0 +1,122 @@ +import io +import os +import re +from configparser import ConfigParser, NoSectionError + +import click + +TOX_SECTION = "tox" +ENVLIST = "envlist" +TEST_ENV_SECTION = "testenv" +TEST_ENV_DEPS = "deps" +PYTHON_SUBSTITUTE = "py{38, 312}" +DJANGO_SUBSTITUTE = "django{42}" + +DJANGO_42_DEPENDENCY = "django42: Django>=4.2,<4.3\n" +NEW_DJANGO_DEPENDENCIES = DJANGO_42_DEPENDENCY + +SECTIONS = [TOX_SECTION, TEST_ENV_SECTION] + +PYTHON_PATTERN = "(py{.*?}-?|py[0-9]+,|py[0-9]+-)" + +DJANGO_PATTERN = "(django[0-9]+,|django[0-9]+\n|django{.*}\n|django{.*?}|django[0-9]+-|django{.*}-)" + +DJANGO_DEPENDENCY_PATTERN = "([^\n]*django[0-9]+:.*\n?)" + + +class ConfigReader: + def __init__(self, file_path=None, config_dict=None): + self.config_dict = config_dict + self.file_path = file_path + + def get_modernizer(self): + config_parser = ConfigParser() + if self.config_dict is not None: + config_parser.read_dict(self.config_dict) + else: + 
config_parser.read(self.file_path) + return ToxModernizer(config_parser, self.file_path) + + +class ToxModernizer: + def __init__(self, config_parser, file_path): + self.file_path = file_path + self.config_parser = config_parser + self._validate_tox_config_sections() + + def _validate_tox_config_sections(self): + if not self.config_parser.sections(): + raise NoSectionError("Bad Config. No sections found.") + + if all(section not in SECTIONS for section in self.config_parser.sections()): + raise NoSectionError("File doesn't contain required sections") + + def _update_env_list(self): + tox_section = self.config_parser[TOX_SECTION] + env_list = tox_section[ENVLIST] + + env_list = ToxModernizer._replace_runners(PYTHON_PATTERN, PYTHON_SUBSTITUTE, env_list) + env_list = ToxModernizer._replace_runners(DJANGO_PATTERN, DJANGO_SUBSTITUTE, env_list) + self.config_parser[TOX_SECTION][ENVLIST] = env_list + + @staticmethod + def _replace_runners(pattern, substitute, env_list): + matches = re.findall(pattern, env_list) + if not matches: + return env_list + substitute = ToxModernizer._get_runner_substitute(matches, substitute) + return ToxModernizer._replace_matches(pattern, substitute, env_list, matches) + + @staticmethod + def _replace_matches(pattern, substitute, target, matches): + if not matches: + return target + occurrences_to_replace = len(matches) - 1 + if occurrences_to_replace > 0: + target = re.sub(pattern, '', target, occurrences_to_replace) + target = re.sub(pattern, substitute, target) + return target + + @staticmethod + def _get_runner_substitute(matches, substitute): + last_match = matches[-1] + has_other_runners = last_match.endswith('-') + return substitute + "-" if has_other_runners else substitute + + def _replace_django_versions(self): + test_environment = self.config_parser[TEST_ENV_SECTION] + dependencies = test_environment[TEST_ENV_DEPS] + matches = re.findall(DJANGO_DEPENDENCY_PATTERN, dependencies) + dependencies = self._replace_matches(DJANGO_DEPENDENCY_PATTERN, NEW_DJANGO_DEPENDENCIES, dependencies, matches) + + self.config_parser[TEST_ENV_SECTION][TEST_ENV_DEPS] = dependencies + + def _update_config_file(self): + # ConfigParser insists on using tabs for output. We want spaces. 
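+        # Render the config into an in-memory buffer so the tabs can be
+        # swapped for spaces before the file on disk is touched.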
+ with io.StringIO() as configw: + self.config_parser.write(configw) + new_ini = configw.getvalue() + new_ini = new_ini.replace("\t", " ") + with open(self.file_path, 'w') as configfile: + configfile.write(new_ini) + + def modernize(self): + self._update_env_list() + self._replace_django_versions() + self._update_config_file() + + +@click.command() +@click.option( + '--path', default='tox.ini', + help="Path to target tox config file") +def main(path): + if os.path.exists(path): + modernizer = ConfigReader(path).get_modernizer() + modernizer.modernize() + else: + print("tox.ini not found on specified path") + + +if __name__ == '__main__': + main() diff --git a/edx_repo_tools/conventional_commits/extra.txt b/edx_repo_tools/conventional_commits/extra.txt index e29d53ba..5a8004e0 100644 --- a/edx_repo_tools/conventional_commits/extra.txt +++ b/edx_repo_tools/conventional_commits/extra.txt @@ -1,67 +1,59 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # # make upgrade # -alembic==1.11.3 +alembic==1.13.2 # via dataset banal==1.0.6 # via dataset -contourpy==1.1.0 +contourpy==1.2.1 # via matplotlib -cycler==0.11.0 +cycler==0.12.1 # via matplotlib dataset==1.6.2 # via -r edx_repo_tools/conventional_commits/extra.in -fonttools==4.42.1 +fonttools==4.53.1 # via matplotlib -greenlet==2.0.2 - # via sqlalchemy -importlib-metadata==6.8.0 - # via alembic -importlib-resources==6.0.1 +greenlet==3.0.3 # via - # alembic - # matplotlib + # -c edx_repo_tools/conventional_commits/../../requirements/constraints.txt + # sqlalchemy kiwisolver==1.4.5 # via matplotlib -mako==1.2.4 +mako==1.3.5 # via alembic -markupsafe==2.1.3 +markupsafe==2.1.5 # via mako -matplotlib==3.7.2 +matplotlib==3.9.1 # via -r edx_repo_tools/conventional_commits/extra.in -numpy==1.24.4 +numpy==2.0.1 # via # contourpy # matplotlib # pandas -packaging==23.1 +packaging==24.1 # via matplotlib -pandas==2.0.3 +pandas==2.2.2 # via -r edx_repo_tools/conventional_commits/extra.in -pillow==10.0.0 +pillow==10.4.0 # via matplotlib -pyparsing==3.0.9 +pyparsing==3.1.2 # via matplotlib -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # matplotlib # pandas -pytz==2023.3 +pytz==2024.1 # via pandas six==1.16.0 # via python-dateutil -sqlalchemy==1.4.49 +sqlalchemy==1.4.52 # via # alembic # dataset -typing-extensions==4.7.1 +typing-extensions==4.12.2 # via alembic -tzdata==2023.3 +tzdata==2024.1 # via pandas -zipp==3.16.2 - # via - # importlib-metadata - # importlib-resources diff --git a/edx_repo_tools/data.py b/edx_repo_tools/data.py index 87a051fa..9c21cd22 100644 --- a/edx_repo_tools/data.py +++ b/edx_repo_tools/data.py @@ -6,7 +6,6 @@ logging.basicConfig() LOGGER = logging.getLogger(__name__) -OPEN_EDX_YAML = 'openedx.yaml' def iter_nonforks(hub, orgs): @@ -28,9 +27,9 @@ def iter_nonforks(hub, orgs): yield repo -def iter_openedx_yaml(hub, orgs, branches=None): +def iter_openedx_yaml(file_name, hub, orgs, branches=None): """ - Yield the data from all openedx.yaml files found in repositories in ``orgs`` + Yield the data from all catalog-info.yaml or openedx.yaml files found in repositories in ``orgs`` on any of ``branches``. 
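+
+    ``file_name`` is the name of the YAML file to look for in each repository,
+    e.g. "catalog-info.yaml" or "openedx.yaml".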
Arguments: @@ -45,19 +44,20 @@ def iter_openedx_yaml(hub, orgs, branches=None): Repositories (:class:`~github3.Repository) """ + for repo in iter_nonforks(hub, orgs): for branch in (branches or [repo.default_branch]): try: - contents = repo.file_contents(OPEN_EDX_YAML, ref=branch) + contents = repo.file_contents(file_name, ref=branch) except NotFoundError: contents = None if contents is not None: - LOGGER.debug("Found openedx.yaml at %s:%s", repo.full_name, branch) + LOGGER.debug("Found %s at %s:%s", file_name, repo.full_name, branch) try: data = yaml.safe_load(contents.decoded) except Exception as exc: - LOGGER.error("Couldn't parse openedx.yaml from %s:%s, skipping repo", repo.full_name, branch, exc_info=True) + LOGGER.error("Couldn't parse %s from %s:%s, skipping repo", file_name, repo.full_name, branch, exc_info=True) else: if data is not None: yield repo, data diff --git a/edx_repo_tools/find_dependencies/extra.in b/edx_repo_tools/find_dependencies/extra.in index b5daba6c..9dbe9980 100644 --- a/edx_repo_tools/find_dependencies/extra.in +++ b/edx_repo_tools/find_dependencies/extra.in @@ -4,3 +4,4 @@ rich requests +requirements-parser \ No newline at end of file diff --git a/edx_repo_tools/find_dependencies/extra.txt b/edx_repo_tools/find_dependencies/extra.txt index 7fd5da4f..23268a52 100644 --- a/edx_repo_tools/find_dependencies/extra.txt +++ b/edx_repo_tools/find_dependencies/extra.txt @@ -1,26 +1,28 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # # make upgrade # -certifi==2023.7.22 +certifi==2024.7.4 # via requests -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests -idna==3.4 +idna==3.7 # via requests markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -pygments==2.16.1 +pygments==2.18.0 # via rich -requests==2.31.0 +requests==2.32.3 # via -r edx_repo_tools/find_dependencies/extra.in -rich==13.5.2 +requirements-parser==0.10.1 # via -r edx_repo_tools/find_dependencies/extra.in -typing-extensions==4.7.1 - # via rich -urllib3==2.0.4 +rich==13.7.1 + # via -r edx_repo_tools/find_dependencies/extra.in +types-setuptools==71.1.0.20240723 + # via requirements-parser +urllib3==2.2.2 # via requests diff --git a/edx_repo_tools/find_dependencies/find_dependencies.py b/edx_repo_tools/find_dependencies/find_dependencies.py index 04dd64f2..3148ae4a 100644 --- a/edx_repo_tools/find_dependencies/find_dependencies.py +++ b/edx_repo_tools/find_dependencies/find_dependencies.py @@ -245,6 +245,8 @@ def repo_url_from_tgz(tgz_path: str) -> Optional[str]: r"(?i)^Project-URL: Source.*,\s*(.*)$", r"(?i)^Home-page: (.*)$", r"(?i)^Project-URL: Home.*,\s*(.*)$", + # If they point to GitHub issues, that's their repo. + r"(?i)^Project-URL: [^,]+,\s*(https?://github.com/[^/]+/[^/]+)/issues/?$", # If we can't find a URL marked as home, then use any GitHub repo URL. r"(?i)^Project-URL: [^,]+,\s*(https?://github.com/[^/]+/[^/]+)$", ] diff --git a/edx_repo_tools/find_dependencies/find_python_dependencies.py b/edx_repo_tools/find_dependencies/find_python_dependencies.py new file mode 100644 index 00000000..38ad4a41 --- /dev/null +++ b/edx_repo_tools/find_dependencies/find_python_dependencies.py @@ -0,0 +1,82 @@ +""" +Spider and catalog dependencies. 
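+
+Each requirement named in the given files is looked up on PyPI to find its
+home page; packages hosted in second-party GitHub orgs are reported unless
+they are explicitly ignored.
+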
+$ python find_python_dependencies.py --req-file $FILE_PATH
+"""
+
+import sys
+
+import click
+import requests
+import requirements
+
+
+# Look up a package's home page via the PyPI JSON API.
+def request_package_info_url(package):
+    base_url = "https://pypi.org/pypi/"
+    url = f"{base_url}{package}/json"
+    response = requests.get(url)
+    if response.status_code == 200:
+        data_dict = response.json()
+        info = data_dict["info"]
+        return info["home_page"]
+    else:
+        print(f"Failed to retrieve data for package {package}. Status code:", response.status_code)
+
+FIRST_PARTY_ORGS = ["openedx"]
+
+SECOND_PARTY_ORGS = [
+    "edx", "edx-unsupported", "edx-solutions",
+    "mitodl",
+    "overhangio",
+    "open-craft", "eduNEXT", "raccoongang",
+]
+
+def urls_in_orgs(urls, orgs):
+    """
+    Find urls that are in any of the `orgs`.
+    """
+    return sorted(
+        url for url in urls
+        if any(f"/{org}/" in url for org in orgs)
+    )
+
+@click.command()
+@click.option(
+    '--req-file', 'req_files',
+    multiple=True,
+    required=True,
+    help="The absolute file path of a requirements file to examine. You can "
+    "provide this option multiple times to include multiple requirement files.",
+)
+@click.option(
+    '--ignore', 'ignore_paths',
+    multiple=True,
+    help="Dependency repo URL to ignore even if it's "
+    "outside of your organization's approved list.",
+)
+def main(req_files=None, ignore_paths=None):
+    """
+    Analyze the requirements files given on the command line.
+    """
+    home_page = set()
+    for req_file in req_files:
+        with open(req_file) as fbase:
+            for req in requirements.parse(fbase):
+                url = request_package_info_url(req.name)
+                if url is not None:
+                    home_page.add(url)
+
+    packages_urls = set(urls_in_orgs(home_page, SECOND_PARTY_ORGS))
+
+    if diff := packages_urls.symmetric_difference(set(ignore_paths)):
+        print(
+            "The following packages are from 2nd-party orgs and should not be "
+            "added as core dependencies. They can be added as optional "
+            "dependencies, or transferred to the openedx org, before they are "
+            "included:"
+        )
+        print("\n".join(diff))
+        sys.exit(1)
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/edx_repo_tools/oep2/explode_repos_yaml.py b/edx_repo_tools/oep2/explode_repos_yaml.py
index f83b1ad5..c96dac99 100644
--- a/edx_repo_tools/oep2/explode_repos_yaml.py
+++ b/edx_repo_tools/oep2/explode_repos_yaml.py
@@ -27,7 +27,7 @@
 )
 def implode(hub, org, branch):
     """
-    Implode all openedx.yaml files, and print the results as formatted output.
+    Implode all catalog-info.yaml or openedx.yaml files, and print the results as formatted output.
""" data = { repo.full_name: openedx_yaml diff --git a/edx_repo_tools/oep2/report/cli.py b/edx_repo_tools/oep2/report/cli.py index 1adc45e0..f16a3fd3 100644 --- a/edx_repo_tools/oep2/report/cli.py +++ b/edx_repo_tools/oep2/report/cli.py @@ -3,7 +3,7 @@ """ import logging -import pkg_resources +import importlib.resources import sys import click @@ -33,7 +33,7 @@ def cli(hub, trace, pytest_args): """ args = [ '--pyargs', 'edx_repo_tools.oep2.checks', - '-c', pkg_resources.resource_filename(__name__, 'oep2-report.ini'), + '-c', importlib.resources.files(__name__) / 'oep2-report.ini', ] if trace: diff --git a/edx_repo_tools/pull_request_creator/__init__.py b/edx_repo_tools/pull_request_creator/__init__.py new file mode 100644 index 00000000..d9e9d281 --- /dev/null +++ b/edx_repo_tools/pull_request_creator/__init__.py @@ -0,0 +1,668 @@ +""" +Class helps create GitHub Pull requests +""" +# pylint: disable=missing-class-docstring,missing-function-docstring,attribute-defined-outside-init +import logging +import re +import os +import time +from ast import literal_eval + +import click +import requests +from git import Git, Repo +from github import Github, GithubObject, InputGitAuthor, InputGitTreeElement +from packaging.version import Version + + +logging.basicConfig() +logger = logging.getLogger() +logger.setLevel(logging.INFO) + + +@click.command() +@click.option( + '--repo-root', + type=click.Path(exists=True, dir_okay=True, file_okay=False), + required=True, + help="Directory containing local copy of repository" +) +@click.option( + '--base-branch-name', + required=True, + help="Base name for branch to create. Full branch name will be like repo-tools/BASENAME-1234567." +) +@click.option( + '--target-branch', + required=False, + default="master", + help="Target branch against which we have to open a PR", +) +@click.option('--commit-message', required=True) +@click.option('--pr-title', required=True) +@click.option('--pr-body', required=True) +@click.option( + '--user-reviewers', + default='', + help="Comma separated list of Github users to be tagged on pull requests" +) +@click.option( + '--team-reviewers', + default='', + help=("Comma separated list of Github teams to be tagged on pull requests. " + "NOTE: Teams must have explicit write access to the repo, or " + "Github will refuse to tag them.") +) +@click.option( + '--delete-old-pull-requests/--no-delete-old-pull-requests', + default=True, + help="If set, delete old branches with the same base branch name and close their PRs" +) +@click.option( + '--force-delete-old-prs/--no-force-delete-old-prs', + default=False, + help="If set, force delete old branches with the same base branch name and close their PRs" +) +@click.option( + '--draft', is_flag=True +) +@click.option( + '--output-pr-url-for-github-action/--no-output-pr-url-for-github-action', + default=False, + help="If set, print resultant PR in github action set output sytax" +) +@click.option( + '--untracked-files-required', + required=False, + help='If set, add Untracked files to the PR as well' +) +def main( + repo_root, base_branch_name, target_branch, + commit_message, pr_title, pr_body, + user_reviewers, team_reviewers, + delete_old_pull_requests, draft, output_pr_url_for_github_action, + untracked_files_required, force_delete_old_prs +): + """ + Create a pull request with these changes in the repo. 
+ + Required environment variables: + + - GITHUB_TOKEN + - GITHUB_USER_EMAIL + """ + creator = PullRequestCreator( + repo_root=repo_root, + branch_name=base_branch_name, + target_branch=target_branch, + commit_message=commit_message, + pr_title=pr_title, pr_body=pr_body, + user_reviewers=user_reviewers, + team_reviewers=team_reviewers, + draft=draft, + output_pr_url_for_github_action=output_pr_url_for_github_action, + force_delete_old_prs=force_delete_old_prs + ) + creator.create(delete_old_pull_requests, untracked_files_required) + + +class GitHubHelper: # pylint: disable=missing-class-docstring + + def __init__(self): + self._set_github_token() + self._set_user_email() + self._set_github_instance() + self.AUTOMERGE_ACTION_VAR = 'AUTOMERGE_PYTHON_DEPENDENCIES_UPGRADES_PR' + + # FIXME: Does nothing, sets variable to None if env var missing + def _set_github_token(self): + try: + self.github_token = os.environ.get('GITHUB_TOKEN') + except Exception as error: + raise Exception( + "Could not find env variable GITHUB_TOKEN. " + "Please make sure the variable is set and try again." + ) from error + + # FIXME: Does nothing, sets variable to None if env var missing + def _set_user_email(self): + try: + self.github_user_email = os.environ.get('GITHUB_USER_EMAIL') + except Exception as error: + raise Exception( + "Could not find env variable GITHUB_USER_EMAIL. " + "Please make sure the variable is set and try again." + ) from error + + def _set_github_instance(self): + try: + self.github_instance = Github(self.github_token) + except Exception as error: + raise Exception( + "Failed connecting to Github. " + + "Please make sure the github token is accurate and try again." + ) from error + + def _add_reason(self, req, reason): + req['reason'] = reason + return req + + def _add_comment_about_reqs(self, pr, summary, reqs): + separator = "\n" + pr.create_issue_comment( + f"{summary}.
\n {separator.join(self.make_readable_string(req) for req in reqs)}" + ) + + def get_github_instance(self): + return self.github_instance + + def get_github_token(self): + return self.github_token + + # FIXME: Probably can end up picking repo from wrong org if two + # repos have the same name in different orgs. + # + # Use repo_from_remote instead, and delete this when no longer in use. + def connect_to_repo(self, github_instance, repo_name): + """ + Get the repository object of the desired repo. + """ + repos_list = github_instance.get_user().get_repos() + for repo in repos_list: + if repo.name == repo_name: + return repo + + raise Exception( + "Could not connect to the repository: {}. " + "Please make sure you are using the correct " + "credentials and try again.".format(repo_name) + ) + + def repo_from_remote(self, repo_root, remote_name_allow_list=None): + """ + Get the repository object for a repository with a Github remote. + + Optionally restrict the remotes under consideration by passing a list + of names as``remote_name_allow_list``, e.g. ``['origin']``. + """ + patterns = [ + r"git@github\.com:(?P[^/?#]+/[^/?#]+?).git", + # Non-greedy match for repo name so that optional .git on + # end is not included in repo name match. + r"https?://(www\.)?github\.com/(?P[^/?#]+/[^/?#]+?)(/|\.git)?" + ] + for remote in Repo(repo_root).remotes: + if remote_name_allow_list and remote.name not in remote_name_allow_list: + continue + for url in remote.urls: + for pattern in patterns: + m = re.fullmatch(pattern, url) + if m: + fullname = m.group('name') + logger.info("Discovered repo %s in remotes", fullname) + return self.github_instance.get_repo(fullname) + raise Exception("Could not find a Github URL among repo's remotes") + + def branch_exists(self, repository, branch_name): + """ + Checks to see if this branch name already exists + """ + try: + repository.get_branch(branch_name) + except: # pylint: disable=bare-except + return False + return True + + def get_current_commit(self, repo_root): + """ + Get current commit ID of repo at repo_root. + """ + return Git(repo_root).rev_parse('HEAD') + + def get_updated_files_list(self, repo_root, untracked_files_required=False): + """ + Use the Git library to run the ls-files command to find + the list of files updated. + """ + git_instance = Git(repo_root) + git_instance.init() + + if untracked_files_required: + modified_files = git_instance.ls_files("--modified") + untracked_files = git_instance.ls_files("--others", "--exclude-standard") + updated_files = modified_files + '\n' + untracked_files \ + if (len(modified_files) > 0 and len(untracked_files) > 0) \ + else modified_files + untracked_files + + else: + updated_files = git_instance.ls_files("--modified") + + if len(updated_files) > 0: + return updated_files.split("\n") + else: + return [] + + def create_branch(self, repository, branch_name, sha): + """ + Create a new branch with the given sha as its head. + """ + try: + branch_object = repository.create_git_ref(branch_name, sha) + except Exception as error: + raise Exception( + "Unable to create git branch: {}. " + "Check to make sure this branch doesn't already exist.".format(branch_name) + ) from error + return branch_object + + def close_existing_pull_requests(self, repository, user_login, user_name, target_branch='master', + branch_name_filter=None): + """ + Close any existing PR's by the bot user in this PR. This will help + reduce clutter, since any old PR's will be obsolete. 
+        If the function ``branch_name_filter`` is specified, it will be called
+        with the branch names of PRs. A PR will only be closed when the
+        function returns True.
+        """
+        pulls = repository.get_pulls(state="open")
+        deleted_pull_numbers = []
+        for pr in pulls:
+            user = pr.user
+            if user.login == user_login and user.name == user_name and pr.base.ref == target_branch:
+                branch_name = pr.head.ref
+                if branch_name_filter and not branch_name_filter(branch_name):
+                    continue
+                logger.info("Deleting PR: #%s", pr.number)
+                pr.create_issue_comment("Closing obsolete PR.")
+                pr.edit(state="closed")
+                deleted_pull_numbers.append(pr.number)
+
+                self.delete_branch(repository, branch_name)
+        return deleted_pull_numbers
+
+    def create_pull_request(self, repository, title, body, base, head, user_reviewers=GithubObject.NotSet,
+                            team_reviewers=GithubObject.NotSet, verify_reviewers=True, draft=False):
+        """
+        Create a new pull request with the changes in head, and tag a list of
+        teams for review.
+        """
+        pull_request = repository.create_pull(
+            title=title,
+            body=body,
+            base=base,
+            head=head,
+            draft=draft
+        )
+
+        try:
+            any_reviewers = (user_reviewers is not GithubObject.NotSet or team_reviewers is not GithubObject.NotSet)
+            if any_reviewers:
+                logger.info("Tagging reviewers: users=%s and teams=%s", user_reviewers, team_reviewers)
+                pull_request.create_review_request(
+                    reviewers=user_reviewers,
+                    team_reviewers=team_reviewers,
+                )
+                if verify_reviewers:
+                    # Sometimes GitHub can't find the pull request we just made.
+                    # Try waiting a moment before asking about it.
+                    time.sleep(5)
+                    self.verify_reviewers_tagged(pull_request, user_reviewers, team_reviewers)
+
+        except Exception as e:
+            raise Exception(
+                "Some reviewers could not be tagged on new PR "
+                "https://github.com/{}/pull/{}".format(repository.full_name, pull_request.number)
+            ) from e
+
+        # Do this in upgrade PRs only
+        if pull_request.title == 'chore: Upgrade Python requirements':
+            self.verify_upgrade_packages(pull_request)
+
+        return pull_request
+
+    def verify_reviewers_tagged(self, pull_request, requested_users, requested_teams):
+        """
+        Assert that the reviewers we requested were tagged on the PR for review.
+
+        Considerations:
+
+        - Teams cannot be tagged on a PR unless that team has explicit write
+          access to the repo.
+        - Github may have independently tagged additional users or teams
+          based on a CODEOWNERS file.
+        """
+        tagged_for_review = pull_request.get_review_requests()
+
+        tagged_users = [user.login for user in tagged_for_review[0]]
+        if not (requested_users is GithubObject.NotSet or set(requested_users) <= set(tagged_users)):
+            logger.info("User tagging failure: Requested %s, actually tagged %s", requested_users, tagged_users)
+            raise Exception('Some of the requested reviewers were not tagged on PR for review')
+
+        tagged_teams = [team.name for team in tagged_for_review[1]]
+        if not (requested_teams is GithubObject.NotSet or set(requested_teams) <= set(tagged_teams)):
+            logger.info("Team tagging failure: Requested %s, actually tagged %s", requested_teams, tagged_teams)
+            raise Exception('Some of the requested teams were not tagged on PR for review')
+
+    def verify_upgrade_packages(self, pull_request):
+        """
+        Iterate over the pull request diff, parse the changed packages, and
+        check their versions. If every change is an upgrade, add a label
+        marking the PR as ready for auto-merge. If any package is downgraded,
+ """ + location = None + location = pull_request._headers['location'] # pylint: disable=protected-access + logger.info(location) + + if not location: + return + + logger.info('Hitting pull request for difference') + headers = {"Accept": "application/vnd.github.v3.diff", "Authorization": f'Bearer {self.github_token}'} + + load_content = requests.get(location, headers=headers, timeout=5) + txt = '' + time.sleep(3) + logger.info(load_content.status_code) + + if load_content.status_code == 200: + txt = load_content.content.decode('utf-8') + valid_reqs, suspicious_reqs = self.compare_pr_differnce(txt) + + self._add_comment_about_reqs(pull_request, "List of packages in the PR without any issue", valid_reqs) + + if not suspicious_reqs and valid_reqs: + if self.check_automerge_variable_value(location): + pull_request.set_labels('Ready to Merge') + logger.info("Total valid upgrades are %s", valid_reqs) + else: + self._add_comment_about_reqs(pull_request, "These Packages need manual review.", suspicious_reqs) + + else: + logger.info("No package available for comparison.") + + def check_automerge_variable_value(self, location): + """ + Check whether repository has the `AUTOMERGE_PYTHON_DEPENDENCIES_UPGRADES_PR` variable + with `True` value exists. + """ + link = location.split('pulls') + get_repo_variable = link[0] + 'actions/variables/' + self.AUTOMERGE_ACTION_VAR + logger.info('Hitting repository to check AUTOMERGE_ACTION_VAR settings.') + + headers = {"Accept": "application/vnd.github+json", "Authorization": f'Bearer {self.github_token}'} + load_content = requests.get(get_repo_variable, headers=headers, timeout=5) + time.sleep(1) + + if load_content.status_code == 200: + val = literal_eval(load_content.json()['value']) + logger.info(f"AUTOMERGE_ACTION_VAR value is {val}") + return val + + return False + + def compare_pr_differnce(self, txt): + """ Parse the content and extract packages for comparison. 
""" + regex = re.compile(r"(?P[\-\+])(?P[\w][\w\-\[\]]+)==(?P\d+\.\d+(\.\d+)?(\.[\w]+)?)") + reqs = {} + if not txt: + return [], [] + + # skipping zeroth index as it will be empty + files = txt.split("diff --git")[1:] + for file in files: + lines = file.split("\n") + filename_match = re.search(r"[\w\-\_]*.txt", lines[0]) + if not filename_match: + continue + filename = filename_match[0] + reqs[filename] = {} + for line in lines: + match = re.match(regex, line) + if match: + groups = match.groupdict() + keys = ('new_version', 'old_version') if groups['change'] == '+' \ + else ('old_version', 'new_version') + if groups['name'] in reqs[filename]: + reqs[filename][groups['name']][keys[0]] = groups['version'] + else: + reqs[filename][groups['name']] = {keys[0]: groups['version'], keys[1]: None} + combined_reqs = [] + for file, lst in reqs.items(): + for name, versions in lst.items(): + combined_reqs.append( + {"name": name, 'old_version': versions['old_version'], 'new_version': versions['new_version']} + ) + + unique_reqs = [dict(s) for s in set(frozenset(d.items()) for d in combined_reqs)] + valid_reqs = [] + suspicious_reqs = [] + for req in unique_reqs: + if req['new_version'] and req['old_version']: # if both values exits then do version comparison + old_version = Version(req['old_version']) + new_version = Version(req['new_version']) + + # skip, if the package location is changed in txt file only and both versions are same + if old_version == new_version: + continue + if new_version > old_version: + if new_version.major == old_version.major: + valid_reqs.append(req) + else: + suspicious_reqs.append(self._add_reason(req, "MAJOR")) + else: + suspicious_reqs.append(self._add_reason(req, "DOWNGRADE")) + else: + if req['new_version']: + suspicious_reqs.append(self._add_reason(req, "NEW")) + else: + suspicious_reqs.append(self._add_reason(req, "REMOVED")) + + return sorted(valid_reqs, key=lambda d: d['name']), sorted(suspicious_reqs, key=lambda d: d['name']) + + def make_readable_string(self, req): + """making string for readability""" + if 'reason' in req: + if req['reason'] == 'NEW': + return f"- **[{req['reason']}]** `{req['name']}`" \ + f" (`{req['new_version']}`) added to the requirements" + if req['reason'] == 'REMOVED': + return f"- **[{req['reason']}]** `{req['name']}`" \ + f" (`{req['old_version']}`) removed from the requirements" + # either major version bump or downgraded + return f"- **[{req['reason']}]** `{req['name']}` " \ + f"changes from `{req['old_version']}` to `{req['new_version']}`" + # valid requirement + return f"- `{req['name']}` changes from `{req['old_version']}` to `{req['new_version']}`" + + def delete_branch(self, repository, branch_name): + """ + Delete a branch from a repository. 
+ """ + logger.info("Deleting Branch: %s", branch_name) + try: + ref = "heads/{}".format(branch_name) + branch_object = repository.get_git_ref(ref) + branch_object.delete() + except Exception as error: + raise Exception( + "Failed to delete branch" + ) from error + + def get_file_contents(self, repo_root, file_path): + """ + Return contents of local file + """ + try: + full_file_path = os.path.join(repo_root, file_path) + with open(full_file_path, 'r', encoding='utf-8') as opened_file: + data = opened_file.read() + except Exception as error: + raise Exception( + "Unable to read file: {}".format(file_path) + ) from error + + return data + + # pylint: disable=missing-function-docstring + def update_list_of_files(self, repository, repo_root, file_path_list, commit_message, sha, username): + input_trees_list = [] + base_git_tree = repository.get_git_tree(sha) + for file_path in file_path_list: + content = self.get_file_contents(repo_root, file_path) + input_tree = InputGitTreeElement(file_path, "100644", "blob", content=content) + input_trees_list.append(input_tree) + if len(input_trees_list) > 0: + new_git_tree = repository.create_git_tree(input_trees_list, base_tree=base_git_tree) + parents = [repository.get_git_commit(sha)] + author = InputGitAuthor(username, self.github_user_email) + commit_sha = repository.create_git_commit( + commit_message, new_git_tree, parents, author=author, committer=author + ).sha + return commit_sha + + return None + + +class PullRequestCreator: + + def __init__(self, repo_root, branch_name, user_reviewers, team_reviewers, commit_message, pr_title, + pr_body, target_branch='master', draft=False, output_pr_url_for_github_action=False, + force_delete_old_prs=False): + self.branch_name = branch_name + self.pr_body = pr_body + self.pr_title = pr_title + self.commit_message = commit_message + self.team_reviewers = team_reviewers + self.user_reviewers = user_reviewers + self.repo_root = repo_root + self.target_branch = target_branch + self.draft = draft + self.output_pr_url_for_github_action = output_pr_url_for_github_action + self.force_delete_old_prs = force_delete_old_prs + + github_helper = GitHubHelper() + + def _get_github_instance(self): + return self.github_helper.get_github_instance() + + def _get_user(self): + return self.github_instance.get_user() + + def _set_repository(self): + self.repository = self.github_helper.repo_from_remote(self.repo_root, ['origin']) + + def _set_updated_files_list(self, untracked_files_required=False): + self.updated_files_list = self.github_helper.get_updated_files_list(self.repo_root, untracked_files_required) + + def _create_branch(self, commit_sha): + self.github_helper.create_branch(self.repository, self.branch, commit_sha) + + def _set_github_data(self, untracked_files_required=False): + logger.info("Authenticating with Github") + self.github_instance = self._get_github_instance() + self.user = self._get_user() + + logger.info("Trying to connect to repo") + self._set_repository() + logger.info("Connected to %s", self.repository) + self._set_updated_files_list(untracked_files_required) + self.base_sha = self.github_helper.get_current_commit(self.repo_root) + self.branch = "refs/heads/repo-tools/{}-{}".format(self.branch_name, self.base_sha[:7]) + + def _branch_exists(self): + return self.github_helper.branch_exists(self.repository, self.branch) + + def _create_new_branch(self): + logger.info("updated files: %s", self.updated_files_list) + commit_sha = self.github_helper.update_list_of_files( + self.repository, + 
self.repo_root, + self.updated_files_list, + self.commit_message, + self.base_sha, + self.user.name + ) + self._create_branch(commit_sha) + + def _create_new_pull_request(self): + # If there are reviewers to be added, split them into python lists + if isinstance(self.user_reviewers, str) and self.user_reviewers: + user_reviewers = self.user_reviewers.split(',') + else: + user_reviewers = GithubObject.NotSet + + if isinstance(self.team_reviewers, str) and self.team_reviewers: + team_reviewers = self.team_reviewers.split(',') + else: + team_reviewers = GithubObject.NotSet + + pr = self.github_helper.create_pull_request( + self.repository, + self.pr_title, + self.pr_body, + self.target_branch, + self.branch, + user_reviewers=user_reviewers, + team_reviewers=team_reviewers, + # TODO: Remove hardcoded check in favor of a new --verify-reviewers CLI option + verify_reviewers=self.branch_name != 'cleanup-python-code', + draft=self.draft + ) + logger.info( + "Created PR: https://github.com/%s/pull/%s", + self.repository.full_name, + pr.number, + ) + if self.output_pr_url_for_github_action: + # using print rather than logger to avoid the logger + # prepending anything past which github actions wouldn't parse + print(f'generated_pr=https://github.com/{self.repository.full_name}/pull/{pr.number} >> $GITHUB_OUTPUT') + + def delete_old_pull_requests(self): + logger.info("Checking if there's any old pull requests to delete") + # Only delete old PRs with the same base name + # The prefix used to be 'jenkins' before we changed it to 'repo-tools' + filter_pattern = "(jenkins|repo-tools)/{}-[a-zA-Z0-9]*".format(re.escape(self.branch_name)) + deleted_pulls = self.github_helper.close_existing_pull_requests( + self.repository, self.user.login, + self.user.name, self.target_branch, + branch_name_filter=lambda name: re.fullmatch(filter_pattern, name) + ) + + for num, deleted_pull_number in enumerate(deleted_pulls): + if num == 0: + self.pr_body += "\n\nDeleted obsolete pull_requests:" + self.pr_body += "\nhttps://github.com/{}/pull/{}".format(self.repository.full_name, + deleted_pull_number) + + def _delete_old_branch(self): + logger.info("Deleting existing old branch with same name") + branch_name = self.branch.split('/', 2)[2] + self.github_helper.delete_branch(self.repository, branch_name) + + def create(self, delete_old_pull_requests, untracked_files_required=False): + self._set_github_data(untracked_files_required) + + if not self.updated_files_list: + logger.info("No changes needed") + return + + if self.force_delete_old_prs or delete_old_pull_requests: + self.delete_old_pull_requests() + if self._branch_exists(): + self._delete_old_branch() + + elif self._branch_exists(): + logger.info("Branch for this sha already exists") + return + + self._create_new_branch() + + self._create_new_pull_request() + + +if __name__ == '__main__': + main(auto_envvar_prefix="PR_CREATOR") # pylint: disable=no-value-for-parameter diff --git a/edx_repo_tools/pull_request_creator/extra.in b/edx_repo_tools/pull_request_creator/extra.in new file mode 100644 index 00000000..1f0afcd0 --- /dev/null +++ b/edx_repo_tools/pull_request_creator/extra.in @@ -0,0 +1,6 @@ +# additional dependencies needed by pull_request_creator + +-c ../../requirements/constraints.txt + +PyGithub +packaging # used in create pull request script to compare package versions diff --git a/edx_repo_tools/pull_request_creator/extra.txt b/edx_repo_tools/pull_request_creator/extra.txt new file mode 100644 index 00000000..76b02151 --- /dev/null +++ 
b/edx_repo_tools/pull_request_creator/extra.txt
@@ -0,0 +1,40 @@
+#
+# This file is autogenerated by pip-compile with Python 3.12
+# by the following command:
+#
+#    make upgrade
+#
+certifi==2024.7.4
+    # via requests
+cffi==1.16.0
+    # via
+    #   cryptography
+    #   pynacl
+charset-normalizer==3.3.2
+    # via requests
+cryptography==43.0.0
+    # via pyjwt
+deprecated==1.2.14
+    # via pygithub
+idna==3.7
+    # via requests
+packaging==24.1
+    # via -r edx_repo_tools/pull_request_creator/extra.in
+pycparser==2.22
+    # via cffi
+pygithub==2.3.0
+    # via -r edx_repo_tools/pull_request_creator/extra.in
+pyjwt[crypto]==2.8.0
+    # via pygithub
+pynacl==1.5.0
+    # via pygithub
+requests==2.32.3
+    # via pygithub
+typing-extensions==4.12.2
+    # via pygithub
+urllib3==2.2.2
+    # via
+    #   pygithub
+    #   requests
+wrapt==1.16.0
+    # via deprecated
diff --git a/edx_repo_tools/release/tag_release.py b/edx_repo_tools/release/tag_release.py
index 9c1fcf1b..098db4d6 100644
--- a/edx_repo_tools/release/tag_release.py
+++ b/edx_repo_tools/release/tag_release.py
@@ -52,6 +52,61 @@ def nice_tqdm(iterable, desc):
     return tqdm(iterable, desc=desc.ljust(27))
 
 
+def filter_repos(openedx_repo, catalog_repo):
+    """
+    Return the combined set of repos, using a repo's catalog-info.yaml data
+    when it has an 'openedx.org/release' annotation and falling back to its
+    openedx.yaml data otherwise.
+
+    Arguments:
+        openedx_repo (dict): repos mapped to openedx.yaml data with an `openedx-release` section.
+        catalog_repo (dict): repos mapped to catalog-info.yaml data with an 'openedx.org/release' annotation.
+    """
+    result_dict = {}
+    for repo_key, openedx_data in openedx_repo.items():
+        if repo_key in catalog_repo:
+            result_dict[repo_key] = catalog_repo[repo_key]
+        else:
+            result_dict[repo_key] = openedx_data
+
+    for repo_key, catalog_data in catalog_repo.items():
+        if repo_key not in result_dict:
+            result_dict[repo_key] = catalog_data
+
+    return result_dict
+
+
+def openedx_repos_with_catalog_info(hub, orgs=None, branches=None):
+    """
+    Return a subset of the repos with catalog-info.yaml files: the repos
+    with an annotation `openedx.org/release`.
+
+    Arguments:
+        hub (:class:`~github3.GitHub`): an authenticated GitHub instance.
+        orgs (list of str): The GitHub organizations to scan. Defaults to
+            OPENEDX_ORGS.
+        branches (list of str): The branches to scan in all repos in the selected
+            orgs, defaulting to the repo's default branch.
+
+    Returns:
+        A dict from :class:`~github3.Repository` objects to catalog-info.yaml data for all of the
+        repos with an ``openedx.org/release`` annotation specified.
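# --- Editor's illustration, not part of the diff: how filter_repos merges the
# --- two maps, with catalog-info.yaml data winning on conflict. The repo keys
# --- are plain strings here for brevity; the real code keys on Repository objects.
openedx = {"repo-a": {"openedx-release": {"ref": "master"}},
           "repo-b": {"openedx-release": {"ref": "main"}}}
catalog = {"repo-a": {"metadata": {"annotations": {"openedx.org/release": "master"}}},
           "repo-c": {"metadata": {"annotations": {"openedx.org/release": "main"}}}}
merged = filter_repos(openedx, catalog)
assert set(merged) == {"repo-a", "repo-b", "repo-c"}
assert "metadata" in merged["repo-a"]  # catalog data took precedence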
+ + """ + orgs = orgs or OPENEDX_ORGS + repos = {} + for repo, data in tqdm(iter_openedx_yaml('catalog-info.yaml', hub, orgs=orgs, branches=branches), desc='Find repos'): + + if 'metadata' in data: + annotations = data['metadata'].get('annotations') + if annotations: + # Check if 'openedx.org/release' is present in annotations + if 'openedx.org/release' in annotations: + repo = repo.refresh() + repos[repo] = data + + return repos + + def openedx_release_repos(hub, orgs=None, branches=None): """ Return a subset of the repos with openedx.yaml files: the repos @@ -71,9 +126,8 @@ def openedx_release_repos(hub, orgs=None, branches=None): """ orgs = orgs or OPENEDX_ORGS repos = {} - - for repo, data in tqdm(iter_openedx_yaml(hub, orgs=orgs, branches=branches), desc='Find repos'): - if data.get('openedx-release'): + for repo, data in tqdm(iter_openedx_yaml('openedx.yaml', hub, orgs=orgs, branches=branches), desc='Find repos'): + if data.get('openedx-release'): repo = repo.refresh() repos[repo] = data @@ -180,7 +234,10 @@ def override_repo_refs(repos, override_ref=None, overrides=None): for repo, repo_data in repos.items(): local_override = overrides.get(repo.full_name, override_ref) if local_override: - repo_data["openedx-release"]["ref"] = local_override + if "metadata" in repo_data: + repo_data["metadata"]["annotations"]["openedx.org/release"] = local_override + elif "openedx-release" in repo_data: + repo_data["openedx-release"]["ref"] = local_override return repos @@ -219,11 +276,19 @@ def commit_ref_info(repos, skip_invalid=False): } """ - ref_info = {} for repo, repo_data in nice_tqdm(repos.items(), desc='Find commits'): # are we specifying a ref? - ref = repo_data["openedx-release"].get("ref") + + if 'metadata' in repo_data: + annotations = repo_data['metadata'].get('annotations', {}) + ref = annotations.get('openedx.org/release') + + # Check if 'openedx-release' is present in repo_data and get the ref + # This check will be remove once we will just support catalog_info.yaml and remove openedx.yaml + elif 'openedx-release' in repo_data: + ref = repo_data['openedx-release'].get('ref') + if ref: try: ref_info[repo] = get_latest_commit_for_ref(repo, ref) @@ -742,19 +807,21 @@ def main(hub, ref, use_tag, override_ref, overrides, interactive, quiet, for r in json.load(f) } else: - repos = openedx_release_repos(hub, orgs, branches) + repos = filter_repos(openedx_release_repos(hub, orgs, branches), openedx_repos_with_catalog_info(hub, orgs, branches)) + if output_repos: with open(output_repos, "w") as f: dumped = [{"repo": repo.as_dict(), "data": data} for repo, data in repos.items()] json.dump(dumped, f, indent=2, sort_keys=True) if not repos: - raise ValueError("No repos marked for openedx-release in their openedx.yaml files!") + raise ValueError("No repos marked for openedx-release neither in openedx.yaml nor catalog_info.yaml files!") if included_repos: repos = include_only_repos(repos, included_repos) - repos = trim_skipped_repos(repos, skip_repos) - repos = trim_dependent_repos(repos) - repos = trim_indecisive_repos(repos) + if 'openedx-release' in repos: + repos = trim_skipped_repos(repos, skip_repos) + repos = trim_dependent_repos(repos) + repos = trim_indecisive_repos(repos) repos = override_repo_refs( repos, override_ref=override_ref, diff --git a/edx_repo_tools/repo_access_scraper/extra.txt b/edx_repo_tools/repo_access_scraper/extra.txt index c7164245..a3fcd0e7 100644 --- a/edx_repo_tools/repo_access_scraper/extra.txt +++ b/edx_repo_tools/repo_access_scraper/extra.txt @@ -1,16 +1,16 @@ 
 #
-# This file is autogenerated by pip-compile with Python 3.8
+# This file is autogenerated by pip-compile with Python 3.12
 # by the following command:
 #
 #    make upgrade
 #
-greenlet==2.0.2
-    # via playwright
-playwright==1.37.0
-    # via -r edx_repo_tools/repo_access_scraper/extra.in
-pyee==9.0.4
-    # via playwright
-typing-extensions==4.7.1
+greenlet==3.0.3
     # via
+    #   -c edx_repo_tools/repo_access_scraper/../../requirements/constraints.txt
     #   playwright
-    #   pyee
+playwright==1.45.1
+    # via -r edx_repo_tools/repo_access_scraper/extra.in
+pyee==11.1.0
+    # via playwright
+typing-extensions==4.12.2
+    # via pyee
diff --git a/edx_repo_tools/repo_checks/README.rst b/edx_repo_tools/repo_checks/README.rst
index 14178927..82f306e3 100644
--- a/edx_repo_tools/repo_checks/README.rst
+++ b/edx_repo_tools/repo_checks/README.rst
@@ -1,7 +1,7 @@
 Repo Checks
 ###########
 
-This is a tool & a lightweight framework for automating administrative tasks through GitHub's API.
+This is a tool & a lightweight framework for automating administrative tasks through GitHub's API.
 
 These checks are generally written by Axim engineers to help us establish some consistency across the plethora of repositories under the `openedx GitHub organization <https://github.com/openedx>`_, although they theoretically could be applied to any GitHub organization.
@@ -20,7 +20,7 @@ The ``repo_checks`` command line tool lets you execute these checks, either as d
 
 Usage
 *****
 
-You will need a GH personal access token with the following scopes:
+You will need a GH personal access token (classic, not "Fine-grained tokens") with the following scopes:
 
 * admin:org
 * repo
@@ -47,6 +47,18 @@
 
     repo_checks --no-dry-run <... same args you used above ...>
 
+Note this will open pull requests in the relevant repos. Some repos intentionally don't have certain workflows (for example, ``docs.openedx.org`` does not use ``commitlint``), so please tag maintainers on the pull requests so they can decide whether or not to use the added or changed workflows.
+
+When running over all repos in an organization, the script runs on the newest repos first, as those are the most likely
+to be out of compliance.
+
+A note about rate-limiting: if your run is halted due to rate-limiting, note the last repo the check was running on
+in the output, and restart the job from there once your rate limit has been reset::
+
+    repo_checks ...  # original run
+    ...  # rate limiting or other error halts the run
+    repo_checks ... --start-at "<last-repo-checked>"  # Re-run starting from where we halted.
+
 Contributing
 ************
 
@@ -54,7 +66,7 @@ Contributing
 
 * Consider adding `to the test suite <../../tests/test_repo_checks.py>`_ even though it is currently sparse.
 
-* CI will run tests for you, but not linting, so ensure your changes don't break repo_checks' pylint: ``pylint edx_repo_tools/repo_checks``.
+* CI will run tests for you, but not linting, so ensure your changes don't break repo_checks' pylint: ``pylint edx_repo_tools/repo_checks``.
 
 * Dry-run the script and save the output (non-Axim engineers: you should be able to do this with a read-only GH access token).
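As an editor's sketch (not part of the tool's documented behaviour), the newest-first ordering means resuming after a rate limit is just a matter of skipping ahead to the named repo::

    from itertools import dropwhile

    def resume_from(repo_names, start_at=None):
        """Yield repo names beginning at `start_at` (all of them if it is None)."""
        if start_at is None:
            yield from repo_names
        else:
            yield from dropwhile(lambda name: name != start_at, repo_names)

    assert list(resume_from(["newest", "newer", "oldest"], "newer")) == ["newer", "oldest"]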
diff --git a/edx_repo_tools/repo_checks/extra.txt b/edx_repo_tools/repo_checks/extra.txt
index 1e1b71d1..7edff4c6 100644
--- a/edx_repo_tools/repo_checks/extra.txt
+++ b/edx_repo_tools/repo_checks/extra.txt
@@ -1,32 +1,32 @@
 #
-# This file is autogenerated by pip-compile with Python 3.8
+# This file is autogenerated by pip-compile with Python 3.12
 # by the following command:
 #
 #    make upgrade
 #
 cache-to-disk==2.0.0
     # via -r edx_repo_tools/repo_checks/extra.in
-certifi==2023.7.22
+certifi==2024.7.4
     # via requests
-charset-normalizer==3.2.0
+charset-normalizer==3.3.2
     # via requests
 click==8.1.7
     # via -r edx_repo_tools/repo_checks/extra.in
-fastcore==1.5.29
+fastcore==1.5.54
     # via ghapi
-ghapi==1.0.4
+ghapi==1.0.5
     # via -r edx_repo_tools/repo_checks/extra.in
-idna==3.4
+idna==3.7
     # via requests
-packaging==23.1
+packaging==24.1
     # via
     #   fastcore
     #   ghapi
 pyyaml==6.0.1
     # via -r edx_repo_tools/repo_checks/extra.in
-requests==2.31.0
+requests==2.32.3
     # via -r edx_repo_tools/repo_checks/extra.in
-urllib3==2.0.4
+urllib3==2.2.2
     # via requests
 
 # The following packages are considered to be unsafe in a requirements file:
diff --git a/edx_repo_tools/repo_checks/labels.yaml b/edx_repo_tools/repo_checks/labels.yaml
index e2a0565b..f0c15084 100644
--- a/edx_repo_tools/repo_checks/labels.yaml
+++ b/edx_repo_tools/repo_checks/labels.yaml
@@ -16,6 +16,21 @@
 
 # Describe your color with a comment so it's easier to review.
 
+### LABELS USED FOR TRACKING PROJECT-WIDE CONCERNS
+
+- name: "release testing"
+  color: "aa66aa" # magenta?
+  description: "Affects the upcoming release (attention needed)"
+
+- name: "release blocker"
+  color: "bb33bb" # MAGENTA!
+  description: "Blocks the upcoming release (fix needed)"
+
+- name: "business-specific"
+  color: "d93f0b" # scarlet red...
+  description: "Code that relates to a specific user and should be refactored and removed."
+
+
 ### LABELS INDICATING HOW ISSUES CAN BE ENGAGED WITH.
 
 - name: "good first issue"
@@ -26,6 +41,10 @@
   color: "54976d" # fenway green
   description: "Ready to be picked up by anyone in the community"
 
+- name: "hacktoberfest-accepted"
+  color: "e2ba53" # golden-yellow
+  description: "Issue accepted for hacktoberfest participants"
+
 ### LABELS INDICATING BROAD THEMES OF WORK.
 ### MORE THAN ONE OF THESE MAY APPLY AT A TIME.
@@ -54,6 +73,13 @@
   color: "0052cc" # royal blue
   description: "Relates to documentation improvements"
 
+- name: "FC"
+  color: "038E8E" # tealish
+  description: "Relates to an Axim Funded Contribution project"
+
+- name: "performance"
+  color: "4a03fc" # blue-purple
+  description: "Relates to improving latency/throughput or reducing resource usage"
 
 ### LABELS INDICATING THE SCOPE OR FUNCTION OF THE ISSUE.
@@ -67,6 +93,10 @@
   color: "d93f0b" # scarlet red...
   description: "Report of or fix for something that isn't working as intended"
 
+- name: "flaky-test"
+  color: "006b75" # teal
+  description: "Details a test removed as part of the flaky test process"
+
 - name: "discovery"
   color: "d876e3" # a curious shade of lavender
   description: "Pre-work to determine if an idea is feasible"
@@ -96,6 +126,12 @@
 ### YOU CAN SEE THE CURRENT PROJECT MANAGERS HERE:
 ### https://openedx.atlassian.net/wiki/spaces/COMM/pages/3548807177/Community+Contributions+Project+Manager#Current-OSPR-Project-Managers
 
+# This label is manually applied by PR authors or others to indicate that the
+# pull request is backporting a change on main to a named release.
+- name: "backport" + color: "354d73" # violet blue + description: "PR backports a change from main to a named release." + # Waiting for the author to respond to change requests, feedback, failing # tests, etc. Usually this label is used for PRs in board statuses other # than “Waiting for Author” (e.g. the pull request is “In Eng Review” @@ -142,6 +178,13 @@ color: "f5424b" # crimson red description: "Author's first PR to this repository, awaiting test authorization from Axim" +# This helps with PR triage and providing better visibility to CCs for PRs that don't have reviewers, +# or PRs that have reviewers assigned that the project managers can't remove +# (e.g. reviewers who have since abandoned the PR or that are no longer responsible for that repo). +- name: "needs reviewer assigned" + color: "e3735e" # terra cotta + description: "PR needs to be (re-)assigned a new reviewer" + # Automatically added by bot to PRs coming from community contributors # other than (a) Axim itself or (b) those under 2U's entity CLA. - name: "open-source-contribution" @@ -157,9 +200,15 @@ # Open edX product managers will add this label to PRs which require # product review. - name: "product review" - color: "c97bf7" # light puple + color: "c97bf7" # light purple description: "PR requires product review before merging" +# Open edX product managers will add this label to PRs which have completed +# product review. +- name: "product review complete" + color: "7bf7d8" # light teal + description: "PR has gone through product review" + # As of May 2023, 2U has a "blended" development structure through which they design, fund & # review contributions from other community providers. Their own tools or project managers # may add this label in order to manage such contributions. @@ -176,3 +225,15 @@ color: "ffd700" # gold! description: "PR author is a Core Contributor (who may or may not have write access to this repo)." + +## LABELS FOR AUTOMATED TOOLING + +# The OSPR bot will recognize "jira:company" labels and make Jira issues for +# pull requests labelled with them. +- name: "jira:2u" + color: "1475D4" # 2Blue + description: "We want an issue in the 2U Jira instance" + +- name: "create-sandbox" + color: "fdee73" # sandy yellow + description: "open-craft-grove should create a sandbox environment from this PR" diff --git a/edx_repo_tools/repo_checks/repo_checks.py b/edx_repo_tools/repo_checks/repo_checks.py index 8eaaa8e8..2ae04adc 100644 --- a/edx_repo_tools/repo_checks/repo_checks.py +++ b/edx_repo_tools/repo_checks/repo_checks.py @@ -14,7 +14,8 @@ import importlib.resources import re import textwrap -from functools import lru_cache +import typing as t +from functools import cache from itertools import chain from pprint import pformat @@ -37,10 +38,12 @@ LABELS_YAML_FILENAME = "./labels.yaml" -# Note: This is functionally equivalent to `from functools import cache`, -# which becomes available in Python 3.9. -# https://docs.python.org/3/library/functools.html#functools.cache -cache = lru_cache(maxsize=None) + +def all_paged_items(func, *args, **kwargs): + """ + Get all items from a GhApi function returning paged results. + """ + return chain.from_iterable(paged(func, *args, per_page=100, **kwargs)) def is_security_private_fork(api, org, repo): @@ -102,11 +105,25 @@ class Check: (is_relevant, check, fix, and dry_run). 
""" - def __init__(self, api, org, repo): + _registered = {} + + def __init__(self, api: GhApi, org: str, repo: str): self.api = api self.org_name = org self.repo_name = repo + @staticmethod + def register(subclass: type[t.Self]) -> type[t.Self]: + """ + Decorate a Check subclass so that it will be available in main() + """ + Check._registered[subclass.__name__] = subclass + return subclass + + @staticmethod + def get_registered_checks() -> dict[str, type[t.Self]]: + return Check._registered.copy() + def is_relevant(self) -> bool: """ Checks to see if the given check is relevant to run on the @@ -150,7 +167,167 @@ def dry_run(self): raise NotImplementedError -class EnsureWorkflowTemplates(Check): +@Check.register +class Settings(Check): + """ + There are certain settings that we agree we want to be set a specific way on all repos. This check + will ensure that those settings are set correctly on all non-security repos. + + Settings: + - Issues should be enabled. + - Wikis should be disabled. The confluence wiki should be used. + - Allow auto-merge to be used. (Does not enable auto-merge, just allows committers to enable it on a per PR basis.) + - Branches should be deleted on merge. + """ + + def __init__(self, api: GhApi, org: str, repo: str): + super().__init__(api, org, repo) + self.expected_settings = { + "has_issues": True, + "has_wiki": False, + "allow_auto_merge": True, + "delete_branch_on_merge": True, + } + + def is_relevant(self) -> bool: + """ + All non security fork repos, public or private. + """ + return not is_security_private_fork(self.api, self.org_name, self.repo_name) + + def check(self) -> tuple[bool, str]: + """ + Verify whether or not the check is failing. + + This should not change anything and should not have a side-effect + other than populating `self` with any data that is needed later for + `fix` or `dry_run`. + + The string in the return tuple should be a human readable reason + that the check failed. + """ + repo = self.api.repos.get(owner=self.org_name, repo=self.repo_name) + + self.settings_that_dont_match = [] + for setting in self.expected_settings: + actual_value = repo.get(setting) + if actual_value != self.expected_settings[setting]: + self.settings_that_dont_match.append((setting, actual_value)) + + if self.settings_that_dont_match: + # Looks like this: + # Some settings don't match our expectations: + # allow_auto_merge: False + # delete_branch_on_merge: False + return ( + False, + f"Some settings don't match our expectations:\n\t\t" + + "\n\t\t".join( + [ + f"{setting[0]}: {setting[1]}" + for setting in self.settings_that_dont_match + ] + ), + ) + + return (True, "All expected settings are set correctly.") + + def dry_run(self): + return self.fix(dry_run=True) + + def fix(self, dry_run=False): + steps = [] + if self.settings_that_dont_match: + if not dry_run: + self.api.repos.update( + self.org_name, self.repo_name, **self.expected_settings + ) + steps.append( + f"Updated repo settings to match expectations.\n\t" + + "\n\t".join( + [ + f"{setting[0]}: {self.expected_settings[setting[0]]}" + for setting in self.settings_that_dont_match + ] + ) + ) + else: + steps.append("No changes needed.") + return steps + + +@Check.register +class NoAdminOrMaintainTeams(Check): + """ + Teams should not be granted `admin` or `maintain` access to a repository unless the access + is exceptional and it is noted here. All other `admin` and `maintain` access is downgraded to + `write` access. 
+ """ + + def __init__(self, api: GhApi, org: str, repo: str): + super().__init__(api, org, repo) + self.teams_to_downgrade = [] + + def is_relevant(self) -> bool: + """ + All non security fork repos, public or private. + """ + return not is_security_private_fork(self.api, self.org_name, self.repo_name) + + def check(self) -> tuple[bool, str]: + """ + Verify whether or not the check is failing. + + This should not change anything and should not have a side-effect + other than populating `self` with any data that is needed later for + `fix` or `dry_run`. + + The string in the return tuple should be a human readable reason + that the check failed. + """ + teams = all_paged_items( + self.api.repos.list_teams, owner=self.org_name, repo=self.repo_name + ) + for team in teams: + if team.permission in ["admin", "maintain"]: + self.teams_to_downgrade.append(team) + + if self.teams_to_downgrade: + team_and_permissions = list( + {f"{team.slug}: {team.permission}" for team in self.teams_to_downgrade} + ) + return ( + False, + f"Some teams have excessive permissions:\n\t\t" + + "\n\t\t".join(team_and_permissions), + ) + + return (True, "No teams with `admin` or `maintain` permissions.") + + def dry_run(self): + return self.fix(dry_run=True) + + def fix(self, dry_run=False): + steps = [] + for team in self.teams_to_downgrade: + if not dry_run: + self.api.teams.add_or_update_repo_permissions_in_org( + self.org_name, + team.slug, + self.org_name, + self.repo_name, + "push", + ) + + steps.append( + f"Reduced permission of `{team.slug}` from `{team.permission}` to `push`" + ) + + return steps + + +@Check.register +class Workflows(Check): """ There are certain github action workflows that we to exist on all repos exactly as they are defined in the `.github` repo in the org. @@ -169,6 +346,15 @@ def __init__(self, api: GhApi, org: str, repo: str): "add-remove-label-on-comment.yml", ] + # A lost of repos and workflows that should not be added to them. + self.exceptions = { + # We don't want commitlint on the docs.openedx.org and edx-documentation repos because + # we want to encourage contributions from non-technical contributors and reduce their + # barrier to entry. + "docs.openedx.org": ["commitlint.yml"], + "edx-documentation": ["commitlint.yml"], + } + self.branch_name = "repo_checks/ensure_workflows" self.files_to_create = [] @@ -192,6 +378,22 @@ def check(self): default_branch = repo.default_branch files_that_differ, files_that_are_missing = self._check_branch(default_branch) + + extra_message = "No repo specific workflows to ignore." + # Update based on repo specific exceptions + if self.repo_name in self.exceptions: + extra_message = ( + "Ignoring repo specific exceptions: {!r}".format( + self.exceptions[self.repo_name] + ) + ) + # We have exceptions for this repo, remove them from the two lists above. + for item in self.exceptions[self.repo_name]: + if item in files_that_differ: + files_that_differ.remove(item) + if item in files_that_are_missing: + files_that_are_missing.remove(item) + # Return False and save the list of files that need to be updated. 
if files_that_differ or files_that_are_missing: self.files_to_create = files_that_are_missing @@ -199,12 +401,14 @@ def check(self): return ( False, f"Some workflows in this repo don't match the template.\n" - f"\t\t{files_that_differ=}\n\t\t{files_that_are_missing=}", + f"\t\t{files_that_differ=}\n\t\t{files_that_are_missing=}\n" + f"\t\t{extra_message}", ) return ( True, - "All desired workflows are in sync with what's in the .github repo.", + "All desired workflows are in sync with what's in the .github repo.\n" + f"\t\t{extra_message}", ) def _check_branch(self, branch_name) -> tuple[list[str], list[str]]: @@ -373,14 +577,11 @@ def fix(self, dry_run=False): ) # Check to see if a PR exists - prs = chain.from_iterable( - paged( - self.api.pulls.list, - owner=self.org_name, - repo=self.repo_name, - head=self.branch_name, - per_page=100, - ) + prs = all_paged_items( + self.api.pulls.list, + owner=self.org_name, + repo=self.repo_name, + head=self.branch_name, ) prs = [pr for pr in prs if pr.head.ref == self.branch_name] @@ -411,7 +612,8 @@ def fix(self, dry_run=False): return steps -class EnsureLabels(Check): +@Check.register +class Labels(Check): """ All repos in the org should have certain labels. """ @@ -438,13 +640,10 @@ def check(self): """ See if our labels exist. """ - existing_labels_from_api = chain.from_iterable( - paged( - self.api.issues.list_labels_for_repo, - self.org_name, - self.repo_name, - per_page=100, - ) + existing_labels_from_api = all_paged_items( + self.api.issues.list_labels_for_repo, + self.org_name, + self.repo_name, ) existing_labels = { self._simplify_label(label.name): { @@ -529,12 +728,12 @@ def _simplify_label(self, label: str): return simplified_label -class RequireTeamPermission(Check): +class TeamAccess(Check): """ Require that a team has a certain level of access to a repository. To use this class as a check, create a subclass that specifies a particular - team and permission level, such as RequireTriageTeamAccess below. + team and permission level, such as TriageTeam below. """ def __init__(self, api: GhApi, org: str, repo: str, team: str, permission: str): @@ -555,13 +754,10 @@ def is_relevant(self): raise NotImplementedError def check(self): - teams = chain.from_iterable( - paged( - self.api.repos.list_teams, - self.org_name, - self.repo_name, - per_page=100, - ) + teams = all_paged_items( + self.api.repos.list_teams, + self.org_name, + self.repo_name, ) team_permissions = {team.slug: team.permission for team in teams} @@ -605,7 +801,8 @@ def fix(self, dry_run=False): raise -class RequireTriageTeamAccess(RequireTeamPermission): +@Check.register +class TriageTeam(TeamAccess): """ Ensure that the openedx-triage team grants Triage access to every public repo in the org. 
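# --- Editor's sketch, not part of the diff: TeamAccess is parametrized by
# --- thin subclasses such as TriageTeam above, which pin a team slug and a
# --- permission in __init__ and inherit check()/fix() unchanged.
# --- "some-team" is a hypothetical slug.
@Check.register
class SomeTeamWriteAccess(TeamAccess):
    """Hypothetical check granting `push` access to some-team on every repo."""
    def __init__(self, api: GhApi, org: str, repo: str):
        super().__init__(api, org, repo, team="some-team", permission="push")

    def is_relevant(self):
        return True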
""" @@ -620,7 +817,8 @@ def is_relevant(self): return is_public(self.api, self.org_name, self.repo_name) -class RequiredCLACheck(Check): +@Check.register +class EnforceCLA(Check): """ This class validates the following: @@ -640,7 +838,7 @@ def __init__(self, api, org, repo): self.cla_team = "cla-checker" self.cla_team_permission = "push" - self.team_check = RequireTeamPermission( + self.team_check = TeamAccess( api, org, repo, @@ -880,14 +1078,120 @@ def _get_update_params_from_get_branch_protection(self): return params -CHECKS = [ - RequiredCLACheck, - RequireTriageTeamAccess, - EnsureLabels, - EnsureWorkflowTemplates, -] -CHECKS_BY_NAME = {check_cls.__name__: check_cls for check_cls in CHECKS} -CHECKS_BY_NAME_LOWER = {check_cls.__name__.lower(): check_cls for check_cls in CHECKS} +@Check.register +class NoDirectUsers(Check): + """ + Users should not have direct repo access + """ + + def __init__(self, api: GhApi, org: str, repo: str): + super().__init__(api, org, repo) + self.users_list = [] + + def is_relevant(self) -> bool: + """ + All non security fork repos, public or private. + """ + return not is_security_private_fork(self.api, self.org_name, self.repo_name) + + def check(self) -> tuple[bool, str]: + """ + Verify whether or not the check is failing. + + This should not change anything and should not have a side-effect + other than populating `self` with any data that is needed later for + `fix` or `dry_run`. + + The string in the return tuple should be a human readable reason + that the check failed. + """ + self.users_list = list(all_paged_items( + self.api.repos.list_collaborators, owner=self.org_name, repo=self.repo_name, affiliation='direct' + )) + users = [f"{user.login}: {user.role_name}" for user in self.users_list] + if users: + return ( + False, + f"Some users have direct repo access:\n\t\t" + + "\n\t\t".join(users), + ) + return (True, "No user has direct repo access.") + + def dry_run(self): + return self.fix(dry_run=True) + + def fix(self, dry_run=False): + steps = [] + for user in self.users_list: + if not dry_run: + self.api.repos.remove_collaborator( + owner=self.org_name, + repo=self.repo_name, + username=user.login, + ) + steps.append( + f"Removed direct access to the repository for user {user.login}" + ) + + return steps + + +@Check.register +class EnsureNoOutsideCollaborators(Check): + """ + Repository shouldn't have outside collaborators + """ + + def __init__(self, api: GhApi, org: str, repo: str): + super().__init__(api, org, repo) + self.users_list = [] + + def is_relevant(self) -> bool: + """ + All non security fork repos, public or private. + """ + return not is_security_private_fork(self.api, self.org_name, self.repo_name) + + def check(self) -> tuple[bool, str]: + """ + Verify whether or not the check is failing. + + This should not change anything and should not have a side-effect + other than populating `self` with any data that is needed later for + `fix` or `dry_run`. + + The string in the return tuple should be a human readable reason + that the check failed. 
+ """ + self.users_list = list(all_paged_items( + self.api.repos.list_collaborators, owner=self.org_name, repo=self.repo_name, affiliation='outside' + )) + users = [f"{user.login}: {user.role_name}" for user in self.users_list] + if users: + return ( + False, + f"The repo has some outside collaborators:\n\t\t" + + "\n\t\t".join(users), + ) + return (True, "The repo doesn't have any outside collaborators.") + + def dry_run(self): + return self.fix(dry_run=True) + + def fix(self, dry_run=False): + steps = [] + for user in self.users_list: + if not dry_run: + self.api.repos.remove_collaborator( + owner=self.org_name, + repo=self.repo_name, + username=user.login, + ) + steps.append( + f"Removed outside collaborator {user.login}" + ) + + return steps @click.command() @@ -917,7 +1221,7 @@ def _get_update_params_from_get_branch_protection(self): "check_names", default=None, multiple=True, - type=click.Choice(CHECKS_BY_NAME.keys(), case_sensitive=False), + type=click.Choice(Check.get_registered_checks().keys(), case_sensitive=False), help=f"Limit to specific check(s), case-insensitive.", ) @click.option( @@ -943,14 +1247,11 @@ def main(org, dry_run, _github_token, check_names, repos, start_at): if not repos: repos = [ repo.name - for repo in chain.from_iterable( - paged( - api.repos.list_for_org, - org, - sort="created", - direction="desc", - per_page=100, - ) + for repo in all_paged_items( + api.repos.list_for_org, + org, + sort="created", + direction="desc", ) ] @@ -959,9 +1260,9 @@ def main(org, dry_run, _github_token, check_names, repos, start_at): click.secho("No Actual Changes Being Made", fg="yellow") if check_names: - active_checks = [CHECKS_BY_NAME[check_name] for check_name in check_names] + active_checks = [Check.get_registered_checks()[check_name] for check_name in check_names] else: - active_checks = CHECKS + active_checks = list(Check.get_registered_checks().values()) click.secho(f"The following checks will be run:", fg="magenta", bold=True) active_checks_string = "\n".join( "\t" + check_cls.__name__ for check_cls in active_checks diff --git a/pylintrc b/pylintrc index 044bb0c3..7146214f 100644 --- a/pylintrc +++ b/pylintrc @@ -64,7 +64,7 @@ # SERIOUSLY. 
# # ------------------------------ -# Generated by edx-lint version: 5.2.5 +# Generated by edx-lint version: 5.3.4 # ------------------------------ [MASTER] ignore = @@ -259,6 +259,7 @@ enable = useless-suppression, disable = bad-indentation, + broad-exception-raised, consider-using-f-string, duplicate-code, file-ignored, @@ -380,6 +381,6 @@ ext-import-graph = int-import-graph = [EXCEPTIONS] -overgeneral-exceptions = Exception +overgeneral-exceptions = builtins.Exception -# 0bb4a6d612f83352ced91b8f50942dfac7d30cd2 +# 6b646c624a39204ce807909aabd80bf4c7d28116 diff --git a/requirements/base.txt b/requirements/base.txt index aa3ebbc6..4688f3a7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # # make upgrade @@ -8,136 +8,133 @@ appdirs==1.4.4 # via # -r requirements/base.in # fissix -attrs==23.1.0 +attrs==23.2.0 # via bowler backports-csv==1.0.7 # via -r requirements/base.in bowler==0.9.0 # via -r requirements/base.in -cachecontrol==0.13.1 +cachecontrol==0.14.0 # via -r requirements/base.in -certifi==2023.7.22 +cachetools==5.4.0 + # via tox +certifi==2024.7.4 # via requests -cffi==1.15.1 +cffi==1.16.0 # via cryptography -charset-normalizer==3.2.0 +chardet==5.2.0 + # via tox +charset-normalizer==3.3.2 # via requests click==8.1.7 # via # -r requirements/base.in # bowler # moreorless -cryptography==41.0.3 +colorama==0.4.6 + # via tox +cryptography==43.0.0 # via pyjwt -distlib==0.3.7 +distlib==0.3.8 # via virtualenv -docutils==0.20.1 +docutils==0.21.2 # via statistics -exceptiongroup==1.1.3 - # via pytest -execnet==2.0.2 +execnet==2.1.1 # via pytest-xdist -filelock==3.12.3 +filelock==3.15.4 # via # tox # virtualenv -fissix==21.11.13 +fissix==24.4.24 # via bowler -gitdb==4.0.10 +gitdb==4.0.11 # via gitpython github3-py==4.0.1 # via -r requirements/base.in -gitpython==3.1.32 +gitpython==3.1.43 # via -r requirements/base.in -idna==3.4 +idna==3.7 # via requests iniconfig==2.0.0 # via pytest -lazy==1.5 +lazy==1.6 # via -r requirements/base.in lockfile==0.12.2 # via -r requirements/base.in -more-itertools==10.1.0 +more-itertools==10.3.0 # via -r requirements/base.in moreorless==0.4.0 # via bowler -msgpack==1.0.5 +msgpack==1.0.8 # via cachecontrol -packaging==23.1 +packaging==24.1 # via + # pyproject-api # pytest # tox -path==16.7.1 +path==16.14.0 # via path-py path-py==12.5.0 # via -r requirements/base.in -platformdirs==3.10.0 - # via virtualenv -pluggy==1.3.0 +platformdirs==4.2.2 + # via + # tox + # virtualenv +pluggy==1.5.0 # via # pytest # tox -py==1.11.0 - # via tox -pycparser==2.21 +pycparser==2.22 # via cffi pyjwt[crypto]==2.8.0 # via github3-py -pytest==7.4.0 +pyproject-api==1.7.1 + # via tox +pytest==8.3.1 # via # -r requirements/base.in # pytest-logging # pytest-xdist pytest-logging==2015.11.4 # via -r requirements/base.in -pytest-xdist==3.3.1 +pytest-xdist==3.6.1 # via -r requirements/base.in -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # -r requirements/base.in # github3-py -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via -r requirements/base.in pyyaml==6.0.1 # via -r requirements/base.in -requests==2.31.0 +requests==2.32.3 # via # -r requirements/base.in # cachecontrol # github3-py -ruamel-yaml==0.17.32 +ruamel-yaml==0.18.6 # via -r requirements/base.in -ruamel-yaml-clib==0.2.7 +ruamel-yaml-clib==0.2.8 # via ruamel-yaml six==1.16.0 - # via - # python-dateutil - # tox -smmap==5.0.0 + # via 
python-dateutil +smmap==5.0.1 # via gitdb statistics==1.0.3.5 # via -r requirements/base.in -tomli==2.0.1 - # via - # pytest - # tox -tox==3.28.0 - # via - # -c requirements/common_constraints.txt - # -r requirements/base.in -tqdm==4.66.1 +tox==4.16.0 + # via -r requirements/base.in +tqdm==4.66.4 # via -r requirements/base.in -typing-extensions==4.7.1 - # via filelock uritemplate==4.1.1 # via # -r requirements/base.in # github3-py -urllib3==2.0.4 +urllib3==2.2.2 # via requests urlobject==2.4.3 # via -r requirements/base.in -virtualenv==20.24.4 +virtualenv==20.26.3 # via tox volatile==2.1.0 # via bowler diff --git a/requirements/common_constraints.txt b/requirements/common_constraints.txt index afe6aa88..b957ec4c 100644 --- a/requirements/common_constraints.txt +++ b/requirements/common_constraints.txt @@ -13,15 +13,28 @@ # using LTS django version -Django<4.0 +Django<5.0 # elasticsearch>=7.14.0 includes breaking changes in it which caused issues in discovery upgrade process. # elastic search changelog: https://www.elastic.co/guide/en/enterprise-search/master/release-notes-7.14.0.html +# See https://github.com/openedx/edx-platform/issues/35126 for more info elasticsearch<7.14.0 # django-simple-history>3.0.0 adds indexing and causes a lot of migrations to be affected django-simple-history==3.0.0 -# tox>4.0.0 isn't yet compatible with many tox plugins, causing CI failures in almost all repos. -# Details can be found in this discussion: https://github.com/tox-dev/tox/discussions/1810 -tox<4.0.0 +# opentelemetry requires version 6.x at the moment: +# https://github.com/open-telemetry/opentelemetry-python/issues/3570 +# Normally this could be added as a constraint in edx-django-utils, where we're +# adding the opentelemetry dependency. However, when we compile pip-tools.txt, +# that uses version 7.x, and then there's no undoing that when compiling base.txt. +# So we need to pin it globally, for now. +# Ticket for unpinning: https://github.com/openedx/edx-lint/issues/407 +importlib-metadata<7 + +# Cause: https://github.com/openedx/event-tracking/pull/290 +# event-tracking 2.4.1 upgrades to pymongo 4.4.0 which is not supported on edx-platform. 
+# We will pin event-tracking so we do not break existing installations
+# This can be unpinned once https://github.com/openedx/edx-platform/issues/34586
+# has been resolved and edx-platform is running with pymongo>=4.4.0
+event-tracking<2.4.1
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 88be8121..34a35df6 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -1 +1,4 @@
 -c common_constraints.txt
+
+# playwright and sqlalchemy requirements conflict for greenlet<=3.0.1
+greenlet>3.0.1
diff --git a/requirements/development.txt b/requirements/development.txt
index 95998814..7f8df4e1 100644
--- a/requirements/development.txt
+++ b/requirements/development.txt
@@ -1,5 +1,5 @@
 #
-# This file is autogenerated by pip-compile with Python 3.8
+# This file is autogenerated by pip-compile with Python 3.12
 # by the following command:
 #
 #    make upgrade
 #
@@ -8,11 +8,11 @@ appdirs==1.4.4
     # via
     #   -r requirements/base.txt
     #   fissix
-astroid==2.15.6
+astroid==3.2.4
     # via
     #   pylint
     #   pylint-celery
-attrs==23.1.0
+attrs==23.2.0
     # via
     #   -r requirements/base.txt
     #   bowler
@@ -20,19 +20,27 @@ backports-csv==1.0.7
     # via -r requirements/base.txt
 bowler==0.9.0
     # via -r requirements/base.txt
-build==0.10.0
+build==1.2.1
     # via pip-tools
-cachecontrol==0.13.1
+cachecontrol==0.14.0
     # via -r requirements/base.txt
-certifi==2023.7.22
+cachetools==5.4.0
+    # via
+    #   -r requirements/base.txt
+    #   tox
+certifi==2024.7.4
     # via
     #   -r requirements/base.txt
     #   requests
-cffi==1.15.1
+cffi==1.16.0
     # via
     #   -r requirements/base.txt
     #   cryptography
-charset-normalizer==3.2.0
+chardet==5.2.0
+    # via
+    #   -r requirements/base.txt
+    #   tox
+charset-normalizer==3.3.2
     # via
     #   -r requirements/base.txt
     #   requests
@@ -47,50 +55,50 @@ click==8.1.7
     #   pip-tools
 click-log==0.4.0
     # via edx-lint
-code-annotations==1.5.0
+code-annotations==1.8.0
     # via edx-lint
-cryptography==41.0.3
+colorama==0.4.6
+    # via
+    #   -r requirements/base.txt
+    #   tox
+cryptography==43.0.0
     # via
     #   -r requirements/base.txt
     #   pyjwt
-dill==0.3.7
+dill==0.3.8
     # via pylint
-distlib==0.3.7
+distlib==0.3.8
     # via
     #   -r requirements/base.txt
     #   virtualenv
-docutils==0.20.1
+docutils==0.21.2
     # via
     #   -r requirements/base.txt
     #   statistics
-edx-lint==5.3.4
+edx-lint==5.3.7
     # via -r requirements/development.in
-exceptiongroup==1.1.3
-    # via
-    #   -r requirements/base.txt
-    #   pytest
-execnet==2.0.2
+execnet==2.1.1
     # via
     #   -r requirements/base.txt
     #   pytest-xdist
-filelock==3.12.3
+filelock==3.15.4
     # via
     #   -r requirements/base.txt
     #   tox
     #   virtualenv
-fissix==21.11.13
+fissix==24.4.24
     # via
     #   -r requirements/base.txt
     #   bowler
-gitdb==4.0.10
+gitdb==4.0.11
     # via
     #   -r requirements/base.txt
     #   gitpython
 github3-py==4.0.1
     # via -r requirements/base.txt
-gitpython==3.1.32
+gitpython==3.1.43
     # via -r requirements/base.txt
-idna==3.4
+idna==3.7
     # via
     #   -r requirements/base.txt
     #   requests
@@ -98,61 +106,57 @@ iniconfig==2.0.0
     # via
     #   -r requirements/base.txt
     #   pytest
-isort==5.12.0
+isort==5.13.2
     # via pylint
-jinja2==3.1.2
+jinja2==3.1.4
     # via code-annotations
-lazy==1.5
+lazy==1.6
     # via -r requirements/base.txt
-lazy-object-proxy==1.9.0
-    # via astroid
 lockfile==0.12.2
     # via -r requirements/base.txt
-markupsafe==2.1.3
+markupsafe==2.1.5
     # via jinja2
 mccabe==0.7.0
     # via pylint
-more-itertools==10.1.0
+more-itertools==10.3.0
     # via -r requirements/base.txt
 moreorless==0.4.0
     # via
     #   -r requirements/base.txt
     #   bowler
-msgpack==1.0.5
+msgpack==1.0.8
     # via
     #   -r requirements/base.txt
     #   cachecontrol
-packaging==23.1
+packaging==24.1
     # via
     #   -r requirements/base.txt
# build + # pyproject-api # pytest # tox -path==16.7.1 +path==16.14.0 # via # -r requirements/base.txt # path-py path-py==12.5.0 # via -r requirements/base.txt -pbr==5.11.1 +pbr==6.0.0 # via stevedore -pip-tools==7.3.0 +pip-tools==7.4.1 # via -r requirements/development.in -platformdirs==3.10.0 +platformdirs==4.2.2 # via # -r requirements/base.txt # pylint + # tox # virtualenv -pluggy==1.3.0 +pluggy==1.5.0 # via # -r requirements/base.txt # pytest # tox -py==1.11.0 - # via - # -r requirements/base.txt - # tox -pycparser==2.21 +pycparser==2.22 # via # -r requirements/base.txt # cffi @@ -160,7 +164,7 @@ pyjwt[crypto]==2.8.0 # via # -r requirements/base.txt # github3-py -pylint==2.17.5 +pylint==3.2.6 # via # edx-lint # pylint-celery @@ -168,15 +172,21 @@ pylint==2.17.5 # pylint-plugin-utils pylint-celery==0.3 # via edx-lint -pylint-django==2.5.3 +pylint-django==2.5.5 # via edx-lint pylint-plugin-utils==0.8.2 # via # pylint-celery # pylint-django -pyproject-hooks==1.0.0 - # via build -pytest==7.4.0 +pyproject-api==1.7.1 + # via + # -r requirements/base.txt + # tox +pyproject-hooks==1.1.0 + # via + # build + # pip-tools +pytest==8.3.1 # via # -r requirements/base.txt # -r requirements/development.in @@ -185,34 +195,34 @@ pytest==7.4.0 # pytest-xdist pytest-logging==2015.11.4 # via -r requirements/base.txt -pytest-mock==3.11.1 +pytest-mock==3.14.0 # via -r requirements/development.in -pytest-xdist==3.3.1 +pytest-xdist==3.6.1 # via -r requirements/base.txt -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # -r requirements/base.txt # github3-py -python-dotenv==1.0.0 +python-dotenv==1.0.1 # via -r requirements/base.txt -python-slugify==8.0.1 +python-slugify==8.0.4 # via code-annotations pyyaml==6.0.1 # via # -r requirements/base.txt # code-annotations # responses -requests==2.31.0 +requests==2.32.3 # via # -r requirements/base.txt # cachecontrol # github3-py # responses -responses==0.23.3 +responses==0.25.3 # via -r requirements/development.in -ruamel-yaml==0.17.32 +ruamel-yaml==0.18.6 # via -r requirements/base.txt -ruamel-yaml-clib==0.2.7 +ruamel-yaml-clib==0.2.8 # via # -r requirements/base.txt # ruamel-yaml @@ -221,54 +231,34 @@ six==1.16.0 # -r requirements/base.txt # edx-lint # python-dateutil - # tox -smmap==5.0.0 +smmap==5.0.1 # via # -r requirements/base.txt # gitdb statistics==1.0.3.5 # via -r requirements/base.txt -stevedore==5.1.0 +stevedore==5.2.0 # via code-annotations text-unidecode==1.3 # via python-slugify -tomli==2.0.1 - # via - # -r requirements/base.txt - # build - # pip-tools - # pylint - # pyproject-hooks - # pytest - # tox -tomlkit==0.12.1 +tomlkit==0.13.0 # via pylint -tox==3.28.0 - # via - # -c requirements/common_constraints.txt - # -r requirements/base.txt -tqdm==4.66.1 +tox==4.16.0 + # via -r requirements/base.txt +tqdm==4.66.4 # via -r requirements/base.txt -types-pyyaml==6.0.12.11 - # via responses -typing-extensions==4.7.1 - # via - # -r requirements/base.txt - # astroid - # filelock - # pylint uritemplate==4.1.1 # via # -r requirements/base.txt # github3-py -urllib3==2.0.4 +urllib3==2.2.2 # via # -r requirements/base.txt # requests # responses urlobject==2.4.3 # via -r requirements/base.txt -virtualenv==20.24.4 +virtualenv==20.26.3 # via # -r requirements/base.txt # tox @@ -276,10 +266,8 @@ volatile==2.1.0 # via # -r requirements/base.txt # bowler -wheel==0.41.2 +wheel==0.43.0 # via pip-tools -wrapt==1.15.0 - # via astroid # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/requirements/pip-tools.txt 
b/requirements/pip-tools.txt index 007ed388..c62445a3 100644 --- a/requirements/pip-tools.txt +++ b/requirements/pip-tools.txt @@ -1,25 +1,22 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # # make upgrade # -build==0.10.0 +build==1.2.1 # via pip-tools click==8.1.7 # via pip-tools -packaging==23.1 +packaging==24.1 # via build -pip-tools==7.3.0 +pip-tools==7.4.1 # via -r requirements/pip-tools.in -pyproject-hooks==1.0.0 - # via build -tomli==2.0.1 +pyproject-hooks==1.1.0 # via # build # pip-tools - # pyproject-hooks -wheel==0.41.2 +wheel==0.43.0 # via pip-tools # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements/pip.txt b/requirements/pip.txt index 13c7e845..53b37154 100644 --- a/requirements/pip.txt +++ b/requirements/pip.txt @@ -1,14 +1,14 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # # make upgrade # -wheel==0.41.2 +wheel==0.43.0 # via -r requirements/pip.in # The following packages are considered to be unsafe in a requirements file: -pip==23.2.1 +pip==24.1.2 # via -r requirements/pip.in -setuptools==68.1.2 +setuptools==71.1.0 # via -r requirements/pip.in diff --git a/setup.py b/setup.py index 09e17228..dc535a88 100644 --- a/setup.py +++ b/setup.py @@ -2,6 +2,7 @@ import os import os.path import re +import sys import setuptools from setuptools import setup @@ -120,12 +121,16 @@ def is_requirement(line): VERSION = get_version('edx_repo_tools', '__init__.py') + # Find our extra requirements. A subdirectory of edx_repo_tools can have an # extra.in file. It will be pip-compiled to extra.txt. Here we find them all # and register them as extras. 
EXTRAS_REQUIRE = {} for fextra in glob.glob("edx_repo_tools/*/extra.txt"): slug = fextra.split("/")[1] + if sys.version_info >= (3, 12) and glob.glob(f'edx_repo_tools/{slug}/extra-py312.txt'): + fextra = f'edx_repo_tools/{slug}/extra-py312.txt' + EXTRAS_REQUIRE[slug] = load_requirements(fextra) # To run tests & linting across the entire repo, we need to install the union @@ -160,9 +165,11 @@ def is_requirement(line): 'add_common_constraint = edx_repo_tools.add_common_constraint:main', 'add_dependabot_ecosystem = edx_repo_tools.dependabot_yml:main', 'add_django32_settings = edx_repo_tools.codemods.django3.add_new_django32_settings:main', + 'audit_users = edx_repo_tools.audit_gh_users.audit_users:main', 'clone_org = edx_repo_tools.dev.clone_org:main', 'conventional_commits = edx_repo_tools.conventional_commits.commitstats:main', 'find_dependencies = edx_repo_tools.find_dependencies.find_dependencies:main', + 'find_python_dependencies = edx_repo_tools.find_dependencies.find_python_dependencies:main', 'get_org_repo_urls = edx_repo_tools.dev.get_org_repo_urls:main', 'modernize_github_actions = edx_repo_tools.codemods.django3.github_actions_modernizer:main', 'modernize_github_actions_django = edx_repo_tools.codemods.django3.github_actions_modernizer_django:main', @@ -174,6 +181,7 @@ def is_requirement(line): 'modernize_travis = edx_repo_tools.codemods.django3.travis_modernizer:main', 'no_yaml = edx_repo_tools.ospr.no_yaml:no_yaml', 'oep2 = edx_repo_tools.oep2:_cli', + 'pull_request_creator = edx_repo_tools.pull_request_creator:main', 'remove_python2_unicode_compatible = edx_repo_tools.codemods.django3.remove_python2_unicode_compatible:main', 'replace_render_to_response = edx_repo_tools.codemods.django3.replace_render_to_response:main', 'replace_static = edx_repo_tools.codemods.django3.replace_static:main', @@ -185,6 +193,8 @@ def is_requirement(line): 'modernize_tox_django42 = edx_repo_tools.codemods.django42.tox_moderniser_django42:main', 'modernize_github_actions_django42 = edx_repo_tools.codemods.django42.github_actions_modernizer_django42:main', 'remove_providing_args = edx_repo_tools.codemods.django42.remove_providing_args_arg:main', + 'python312_gh_actions_modernizer = edx_repo_tools.codemods.python312.gh_actions_modernizer:main', + 'python312_tox_modernizer = edx_repo_tools.codemods.python312.tox_modernizer:main', ], }, package_data={ diff --git a/tests/fake_repos/repo_with_nvmrc/.github/workflows/release.yml b/tests/fake_repos/repo_with_nvmrc/.github/workflows/release.yml index 84eebbab..c8f4088b 100644 --- a/tests/fake_repos/repo_with_nvmrc/.github/workflows/release.yml +++ b/tests/fake_repos/repo_with_nvmrc/.github/workflows/release.yml @@ -27,7 +27,10 @@ jobs: - name: i18n_extract run: npm run i18n_extract - name: Coverage - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: false - name: Build run: npm run build - name: Release diff --git a/tests/fake_repos/repo_without_nvmrc/.github/workflows/release.yml b/tests/fake_repos/repo_without_nvmrc/.github/workflows/release.yml index 84eebbab..c8f4088b 100644 --- a/tests/fake_repos/repo_without_nvmrc/.github/workflows/release.yml +++ b/tests/fake_repos/repo_without_nvmrc/.github/workflows/release.yml @@ -27,7 +27,10 @@ jobs: - name: i18n_extract run: npm run i18n_extract - name: Coverage - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: false - name: Build run: npm run build - 
name: Release diff --git a/tests/pull_request_creator_test_data/diff.txt b/tests/pull_request_creator_test_data/diff.txt new file mode 100644 index 00000000..a77487ba --- /dev/null +++ b/tests/pull_request_creator_test_data/diff.txt @@ -0,0 +1,413 @@ +diff --git a/requirements/base.txt b/requirements/base.txt +index e94bd48..132b72b 100644 +--- a/requirements/base.txt ++++ b/requirements/base.txt +@@ -1,6 +1,6 @@ + # +-# This file is autogenerated by pip-compile with python 3.8 +-# To update, run: ++# This file is autogenerated by pip-compile with Python 3.8 ++# by the following command: + # + # make upgrade + # +@@ -12,14 +12,12 @@ iniconfig==1.1.1 + # via pytest + numpy==1.23.5 + # via pandas +-packaging==21.3 ++packaging==22.0 + # via pytest + pandas==1.5.2 + # via -r requirements/base.in + pluggy==1.0.0 + # via pytest +-pyparsing==3.0.9 +- # via packaging + pytest==7.2.0 + # via + # -r requirements/base.in +diff --git a/requirements/dev.txt b/requirements/dev.txt +index 2259a89..7e171fa 100644 +--- a/requirements/dev.txt ++++ b/requirements/dev.txt +@@ -1,6 +1,6 @@ + # +-# This file is autogenerated by pip-compile with python 3.8 +-# To update, run: ++# This file is autogenerated by pip-compile with Python 3.8 ++# by the following command: + # + # make upgrade + # +@@ -19,12 +19,19 @@ build==0.9.0 + # via + # -r requirements/pip-tools.txt + # pip-tools +-certifi==2022.9.24 ++cachetools==5.2.0 ++ # via ++ # -r requirements/travis.txt ++ # tox ++certifi==2022.12.7 + # via + # -r requirements/travis.txt + # requests +-chardet==5.0.0 +- # via diff-cover ++chardet==5.1.0 ++ # via ++ # -r requirements/travis.txt ++ # diff-cover ++ # tox + charset-normalizer==2.1.1 + # via + # -r requirements/travis.txt +@@ -47,6 +54,10 @@ code-annotations==1.3.0 + # edx-lint + codecov==2.1.12 + # via -r requirements/travis.txt ++colorama==0.4.6 ++ # via ++ # -r requirements/travis.txt ++ # tox + coverage[toml]==6.5.0 + # via + # -r requirements/quality.txt +@@ -75,7 +86,7 @@ exceptiongroup==1.0.4 + # via + # -r requirements/quality.txt + # pytest +-filelock==3.8.0 ++filelock==3.8.2 + # via + # -r requirements/travis.txt + # tox +@@ -113,12 +124,13 @@ numpy==1.23.5 + # via + # -r requirements/quality.txt + # pandas +-packaging==21.3 ++packaging==22.0 + # via + # -r requirements/pip-tools.txt + # -r requirements/quality.txt + # -r requirements/travis.txt + # build ++ # pyproject-api + # pytest + # tox + pandas==1.5.2 +@@ -133,13 +145,14 @@ pep517==0.13.0 + # via + # -r requirements/pip-tools.txt + # build +-pip-tools==6.10.0 ++pip-tools==6.11.0 + # via -r requirements/pip-tools.txt +-platformdirs==2.5.4 ++platformdirs==2.6.0 + # via + # -r requirements/quality.txt + # -r requirements/travis.txt + # pylint ++ # tox + # virtualenv + pluggy==1.0.0 + # via +@@ -150,17 +163,13 @@ pluggy==1.0.0 + # tox + polib==1.1.1 + # via edx-i18n-tools +-py==1.11.0 +- # via +- # -r requirements/travis.txt +- # tox + pycodestyle==2.10.0 + # via -r requirements/quality.txt + pydocstyle==6.1.1 + # via -r requirements/quality.txt + pygments==2.13.0 + # via diff-cover +-pylint==2.15.7 ++pylint==2.15.8 + # via + # -r requirements/quality.txt + # edx-lint +@@ -180,12 +189,10 @@ pylint-plugin-utils==0.7 + # -r requirements/quality.txt + # pylint-celery + # pylint-django +-pyparsing==3.0.9 ++pyproject-api==1.2.1 + # via +- # -r requirements/pip-tools.txt +- # -r requirements/quality.txt + # -r requirements/travis.txt +- # packaging ++ # tox + pytest==7.2.0 + # via + # -r requirements/quality.txt +@@ -228,10 +235,8 @@ requests==2.28.1 + 
six==1.16.0 + # via + # -r requirements/quality.txt +- # -r requirements/travis.txt + # edx-lint + # python-dateutil +- # tox + snowballstemmer==2.2.0 + # via + # -r requirements/quality.txt +@@ -255,13 +260,14 @@ tomli==2.0.1 + # coverage + # pep517 + # pylint ++ # pyproject-api + # pytest + # tox + tomlkit==0.11.6 + # via + # -r requirements/quality.txt + # pylint +-tox==3.27.1 ++tox==4.0.2 + # via + # -r requirements/travis.txt + # tox-battery +@@ -276,7 +282,7 @@ urllib3==1.26.13 + # via + # -r requirements/travis.txt + # requests +-virtualenv==20.17.0 ++virtualenv==20.17.1 + # via + # -r requirements/travis.txt + # tox +diff --git a/requirements/doc.txt b/requirements/doc.txt +index ae644b7..e91647e 100644 +--- a/requirements/doc.txt ++++ b/requirements/doc.txt +@@ -1,6 +1,6 @@ + # +-# This file is autogenerated by pip-compile with python 3.8 +-# To update, run: ++# This file is autogenerated by pip-compile with Python 3.8 ++# by the following command: + # + # make upgrade + # +@@ -14,7 +14,7 @@ babel==2.11.0 + # via sphinx + bleach==5.0.1 + # via readme-renderer +-certifi==2022.9.24 ++certifi==2022.12.7 + # via requests + charset-normalizer==2.1.1 + # via requests +@@ -65,7 +65,7 @@ numpy==1.23.5 + # via + # -r requirements/test.txt + # pandas +-packaging==21.3 ++packaging==22.0 + # via + # -r requirements/test.txt + # pytest +@@ -85,10 +85,6 @@ pygments==2.13.0 + # doc8 + # readme-renderer + # sphinx +-pyparsing==3.0.9 +- # via +- # -r requirements/test.txt +- # packaging + pytest==7.2.0 + # via + # -r requirements/test.txt +diff --git a/requirements/pip-tools.txt b/requirements/pip-tools.txt +index 33e5fff..6a926f1 100644 +--- a/requirements/pip-tools.txt ++++ b/requirements/pip-tools.txt +@@ -8,14 +8,12 @@ build==0.9.0 + # via pip-tools + click==8.1.3 + # via pip-tools +-packaging==21.3 ++packaging==22.0 + # via build + pep517==0.13.0 + # via build +-pip-tools==6.10.0 ++pip-tools==6.11.0 + # via -r requirements/pip-tools.in +-pyparsing==3.0.9 +- # via packaging + tomli==2.0.1 + # via + # build +diff --git a/requirements/quality.txt b/requirements/quality.txt +index 496785e..70f135b 100644 +--- a/requirements/quality.txt ++++ b/requirements/quality.txt +@@ -1,6 +1,6 @@ + # +-# This file is autogenerated by pip-compile with python 3.8 +-# To update, run: ++# This file is autogenerated by pip-compile with Python 3.8 ++# by the following command: + # + # make upgrade + # +@@ -60,7 +60,7 @@ numpy==1.23.5 + # via + # -r requirements/test.txt + # pandas +-packaging==21.3 ++packaging==22.0 + # via + # -r requirements/test.txt + # pytest +@@ -70,7 +70,7 @@ pbr==5.11.0 + # via + # -r requirements/test.txt + # stevedore +-platformdirs==2.5.4 ++platformdirs==2.6.0 + # via pylint + pluggy==1.0.0 + # via +@@ -80,7 +80,7 @@ pycodestyle==2.10.0 + # via -r requirements/quality.in + pydocstyle==6.1.1 + # via -r requirements/quality.in +-pylint==2.15.7 ++pylint==2.15.8 + # via + # edx-lint + # pylint-celery +@@ -94,10 +94,6 @@ pylint-plugin-utils==0.7 + # via + # pylint-celery + # pylint-django +-pyparsing==3.0.9 +- # via +- # -r requirements/test.txt +- # packaging + pytest==7.2.0 + # via + # -r requirements/test.txt +diff --git a/requirements/test.txt b/requirements/test.txt +index adee822..407a915 100644 +--- a/requirements/test.txt ++++ b/requirements/test.txt +@@ -1,6 +1,6 @@ + # +-# This file is autogenerated by pip-compile with python 3.8 +-# To update, run: ++# This file is autogenerated by pip-compile with Python 3.8 ++# by the following command: + # + # make upgrade + # +@@ -30,7 +30,7 @@ 
numpy==1.23.5 + # via + # -r requirements/base.txt + # pandas +-packaging==21.3 ++packaging==22.0 + # via + # -r requirements/base.txt + # pytest +@@ -42,10 +42,6 @@ pluggy==1.0.0 + # via + # -r requirements/base.txt + # pytest +-pyparsing==3.0.9 +- # via +- # -r requirements/base.txt +- # packaging + pytest==7.2.0 + # via + # -r requirements/base.txt +diff --git a/requirements/travis.txt b/requirements/travis.txt +index 284c0b0..191d25a 100644 +--- a/requirements/travis.txt ++++ b/requirements/travis.txt +@@ -1,42 +1,50 @@ + # +-# This file is autogenerated by pip-compile with python 3.8 +-# To update, run: ++# This file is autogenerated by pip-compile with Python 3.8 ++# by the following command: + # + # make upgrade + # +-certifi==2022.9.24 ++cachetools==5.2.0 ++ # via tox ++certifi==2022.12.7 + # via requests ++codecov==2.1.12 ++ # via -r requirements/travis.in ++chardet==5.1.0 ++ # via tox + charset-normalizer==2.1.1 + # via requests +-codecov==2.1.12 +- # via -r requirements/travis.in ++colorama==0.4.6 ++ # via tox + coverage==6.5.0 + # via codecov + distlib==0.3.6 + # via virtualenv +-filelock==3.8.0 ++filelock==3.8.2 + # via + # tox + # virtualenv + idna==3.4 + # via requests +-packaging==21.3 +- # via tox +-platformdirs==2.5.4 +- # via virtualenv ++packaging==22.0 ++ # via ++ # pyproject-api ++ # tox ++platformdirs==2.6.0 ++ # via ++ # tox ++ # virtualenv + pluggy==1.0.0 + # via tox +-py==1.11.0 ++pyproject-api==1.2.1 + # via tox +-pyparsing==3.0.9 +- # via packaging + requests==2.28.1 + # via codecov +-six==1.16.0 +- # via tox + tomli==2.0.1 +- # via tox +-tox==3.27.1 ++ # via ++ # pyproject-api ++ # tox ++tox==4.0.2 + # via + # -r requirements/travis.in + # tox-battery +@@ -44,5 +52,5 @@ tox-battery==0.6.1 + # via -r requirements/travis.in + urllib3==1.26.13 + # via requests +-virtualenv==20.17.0 ++virtualenv==20.17.1 + # via tox diff --git a/tests/pull_request_creator_test_data/minor_diff.txt b/tests/pull_request_creator_test_data/minor_diff.txt new file mode 100644 index 00000000..905159ad --- /dev/null +++ b/tests/pull_request_creator_test_data/minor_diff.txt @@ -0,0 +1,17 @@ +diff --git a/requirements/base.txt b/requirements/base.txt +index e94bd48..132b72b 100644 +--- a/requirements/base.txt ++++ b/requirements/base.txt +@@ -1,6 +1,6 @@ + # +-# This file is autogenerated by pip-compile with python 3.8 +-# To update, run: ++# This file is autogenerated by pip-compile with Python 3.8 ++# by the following command: + # + # make upgrade + # +@@ -12,14 +12,12 @@ iniconfig==1.1.1 +-packaging==21.3 ++packaging==21.6 + diff --git a/tests/sample_files/sample_ci_file.yml b/tests/sample_files/sample_ci_file.yml index 0442a2ac..bee59809 100644 --- a/tests/sample_files/sample_ci_file.yml +++ b/tests/sample_files/sample_ci_file.yml @@ -14,8 +14,12 @@ jobs: matrix: os: [ubuntu-20.04] python-version: ['3.5', '3.8'] - django-version: ['3.2', '4.0'] - toxenv: ['django32', 'quality', 'docs', 'pii_check'] + django-version: [django42] + toxenv: + - 'quality' + - 'docs' + - 'pii_check' + - django42 steps: - uses: actions/checkout@v2 - name: setup python diff --git a/tests/sample_files/sample_ci_file_2.yml b/tests/sample_files/sample_ci_file_2.yml index 338b2cc8..37ee6938 100644 --- a/tests/sample_files/sample_ci_file_2.yml +++ b/tests/sample_files/sample_ci_file_2.yml @@ -13,19 +13,19 @@ jobs: strategy: matrix: os: [ubuntu-20.04] - python-version: ['3.5', '3.6', '3.7', '3.8'] + python-version: ['3.5', '3.6', '3.7', '3.8', '3.12'] django-version: ['3.2'] include: - - python-version: "3.5" - 
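The new minor_diff.txt fixture above exists to exercise the PR-diff classification covered by the tests later in this patch: a lone packaging 21.3 to 21.6 bump should be treated as a minor, unsuspicious change. As a rough illustration of the parsing such a check implies, here is a hypothetical helper (the name and regex are mine, not the repo's) that pairs the `-pkg==old` and `+pkg==new` lines of a requirements diff:

    import re

    def extract_upgrades(diff_text):
        # Map package name -> (old_version, new_version) for pins changed in a diff.
        old = dict(re.findall(r"^-([A-Za-z0-9_.\-]+)==(\S+)", diff_text, re.MULTILINE))
        new = dict(re.findall(r"^\+([A-Za-z0-9_.\-]+)==(\S+)", diff_text, re.MULTILINE))
        return {name: (old[name], ver) for name, ver in new.items() if name in old}

    assert extract_upgrades("-packaging==21.3\n+packaging==21.6\n") == {
        "packaging": ("21.3", "21.6"),
    }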
toxenv: 'quality' - ubuntu: 20.04 + - python-version: "3.5" + toxenv: 'quality' + ubuntu: 20.04 exclude: - - python-version: "3.5" - toxenv: 'quality' - - python-version: "3.7" - toxenv: 'docs' - - python-version: "3.8" - toxenv: 'pii_check' + - python-version: "3.5" + toxenv: 'quality' + - python-version: "3.7" + toxenv: 'docs' + - python-version: "3.8" + toxenv: 'pii_check' steps: - uses: actions/checkout@v2 - name: setup python @@ -45,8 +45,9 @@ jobs: run: tox - name: Run Coverage - if: matrix.python-version == '3.8' && matrix.toxenv=='django32' - uses: codecov/codecov-action@v1 + if: matrix.python-version == '3.8' && matrix.toxenv=='django42' + uses: codecov/codecov-action@v4 with: + token: ${{ secrets.CODECOV_TOKEN }} flags: unittests fail_ci_if_error: true diff --git a/tests/sample_files/sample_node_ci.yml b/tests/sample_files/sample_node_ci.yml index 23c7a96d..09d21dd9 100644 --- a/tests/sample_files/sample_node_ci.yml +++ b/tests/sample_files/sample_node_ci.yml @@ -37,7 +37,10 @@ jobs: run: npm run test - name: Run Coverage - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: true - name: Send failure notification if: ${{ failure() }} diff --git a/tests/sample_files/sample_node_ci2.yml b/tests/sample_files/sample_node_ci2.yml index 1a0c97dd..9bbace65 100644 --- a/tests/sample_files/sample_node_ci2.yml +++ b/tests/sample_files/sample_node_ci2.yml @@ -42,4 +42,7 @@ jobs: run: npm run build - name: Run Coverage - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: true diff --git a/tests/sample_tox_config.ini b/tests/sample_tox_config.ini index 7639169f..a62f0d77 100644 --- a/tests/sample_tox_config.ini +++ b/tests/sample_tox_config.ini @@ -1,6 +1,6 @@ [tox] envlist = - py{27,35,36,37}-django{111,20,21,22}-drf{39,310,latest}, + py{38}-django{32,40}-drf{39,310,latest}, docs, quality, version_check, @@ -9,10 +9,8 @@ envlist = [testenv] deps = - django111: Django>=1.11,<2.0 - django20: Django>=2.0,<2.1 - django21: Django>=2.1,<2.2 - django22: Django>=2.2,<2.3 + django32: Django>=3.2,<3.3 + django40: Django>=4.0,<4.1 drf39: djangorestframework<3.10.0 drf310: djangorestframework<3.11.0 drflatest: djangorestframework diff --git a/tests/sample_tox_config_2.ini b/tests/sample_tox_config_2.ini index 59432569..173f6763 100644 --- a/tests/sample_tox_config_2.ini +++ b/tests/sample_tox_config_2.ini @@ -1,6 +1,6 @@ [tox] envlist = - py27,py35,py36,py37-django111,django20,django21,django22-drf39,drf310,drflatest, + py37,py38-django32,django40-drf39,drf310,drflatest, docs, quality, version_check, @@ -9,10 +9,8 @@ envlist = [testenv] deps = - django111: Django>=1.11,<2.0 - django20: Django>=2.0,<2.1 - django21: Django>=2.1,<2.2 - django22: Django>=2.2,<2.3 + django32: Django>=3.2,<3.3 + django40: Django>=4.0,<4.1 drf39: djangorestframework<3.10.0 drf310: djangorestframework<3.11.0 drflatest: djangorestframework diff --git a/tests/test_actions_modernizer.py b/tests/test_actions_modernizer.py index b7a2a220..b8f375ed 100644 --- a/tests/test_actions_modernizer.py +++ b/tests/test_actions_modernizer.py @@ -6,7 +6,7 @@ import uuid from unittest import TestCase -from edx_repo_tools.codemods.django3 import GithubCIModernizer +from edx_repo_tools.codemods.python312 import GithubCIModernizer from edx_repo_tools.utils import YamlLoader @@ -37,22 +37,16 @@ def test_python_matrix_items(self): python_versions = 
ci_elements['jobs']['run_tests']['strategy']['matrix']['python-version'] self.assertIsInstance(python_versions, list) - self.assertNotIn('3.5', python_versions) + self.assertIn('3.8', python_versions) + self.assertIn('3.12', python_versions) def test_python_matrix_items_build_tag(self): ci_elements = TestGithubActionsModernizer._get_updated_yaml_elements(self.test_file3) python_versions = ci_elements['jobs']['build']['strategy']['matrix']['python-version'] self.assertIsInstance(python_versions, list) - self.assertNotIn('3.5', python_versions) - - def test_include_exclude_list(self): - ci_elements = TestGithubActionsModernizer._get_updated_yaml_elements(self.test_file2) - include_list = ci_elements['jobs']['run_tests']['strategy']['matrix'].get('include', {}) - exclude_list = ci_elements['jobs']['run_tests']['strategy']['matrix'].get('exclude', {}) - - for item in list(include_list) + list(exclude_list): - self.assertNotEqual(item['python-version'], '3.5') + self.assertIn('3.8', python_versions) + self.assertIn('3.12', python_versions) def tearDown(self): os.remove(self.test_file1) diff --git a/tests/test_actions_modernizer_django.py b/tests/test_actions_modernizer_django.py index 4cedbdf2..ff0ddd9b 100644 --- a/tests/test_actions_modernizer_django.py +++ b/tests/test_actions_modernizer_django.py @@ -5,7 +5,7 @@ import shutil from os.path import basename, dirname, join -from edx_repo_tools.codemods.django3 import GithubCIDjangoModernizer +from edx_repo_tools.codemods.python312 import GithubCIModernizer from edx_repo_tools.utils import YamlLoader @@ -18,7 +18,7 @@ def setup_local_copy(filepath, tmpdir): def get_updated_yaml_elements(file_path): - modernizer = GithubCIDjangoModernizer(file_path) + modernizer = GithubCIModernizer(file_path) modernizer.modernize() yaml_loader = YamlLoader(file_path) return yaml_loader.elements @@ -32,8 +32,8 @@ def test_matrix_items(tmpdir): ci_elements = get_updated_yaml_elements(test_file) tox_envs = ci_elements['jobs']['run_tests']['strategy']['matrix']['toxenv'] - assert 'django32' in tox_envs - assert 'django40' in tox_envs + assert 'django32' not in tox_envs + assert 'django42' in tox_envs def test_matrix_items_multiple_jobs(tmpdir): @@ -45,18 +45,17 @@ def test_matrix_items_multiple_jobs(tmpdir): # test the case with django env present in one job job1_tox_envs = ci_elements['jobs']['build']['strategy']['matrix']['tox-env'] - assert 'django32' in job1_tox_envs - assert 'django40' in job1_tox_envs + assert 'django32' not in job1_tox_envs + assert 'django42' in job1_tox_envs # test the case with django env present in second job job2_tox_envs = ci_elements['jobs']['django_test']['strategy']['matrix']['django-version'] - assert 'django32' in job2_tox_envs - assert 'django40' in job2_tox_envs + assert 'django32' not in job2_tox_envs + assert 'django42' in job2_tox_envs # test the case with no django env present in third job. 
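Stepping back from the individual assertions: the rewritten tests above and below expect the python312 GithubCIModernizer to add '3.12' to each python-version matrix (keeping '3.8') and to replace django32 entries with django42 across the toxenv, tox-env, and django-version axes. A rough sketch of that rewrite, inferred from the assertions rather than taken from the codemod itself:

    def modernize_matrix(matrix):
        # Inferred behavior: keep 3.8, add 3.12, and swap django32 for django42.
        pythons = matrix.setdefault("python-version", [])
        if "3.12" not in pythons:
            pythons.append("3.12")
        for key in ("toxenv", "tox-env", "django-version"):
            if key in matrix:
                matrix[key] = ["django42" if env == "django32" else env
                               for env in matrix[key]]
        return matrix

    assert modernize_matrix({"python-version": ["3.8"], "toxenv": ["quality", "django32"]}) == {
        "python-version": ["3.8", "3.12"],
        "toxenv": ["quality", "django42"],
    }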
job3_tox_envs = ci_elements['jobs']['test']['strategy']['matrix']['tox'] - assert 'django32' not in job3_tox_envs - assert 'django40' not in job3_tox_envs + assert 'django42' in job3_tox_envs def test_include_exclude_list(tmpdir): """ @@ -69,6 +68,6 @@ def test_include_exclude_list(tmpdir): for item in list(include_list) + list(exclude_list): if 'django-version' in item: - assert item['django-version'] != '3.1' + assert item['django-version'] != '3.2' if 'toxenv' in item: - assert item['toxenv'] != 'django30' + assert item['toxenv'] != 'django42' diff --git a/tests/test_pull_request_creator.py b/tests/test_pull_request_creator.py new file mode 100644 index 00000000..69987b8a --- /dev/null +++ b/tests/test_pull_request_creator.py @@ -0,0 +1,432 @@ +# pylint: disable=missing-module-docstring,missing-class-docstring + +from os import path +from unittest import TestCase +from unittest.mock import MagicMock, Mock, mock_open, patch + +from edx_repo_tools.pull_request_creator import GitHubHelper, PullRequestCreator + + +class HelpersTestCase(TestCase): + + def test_close_existing_pull_requests(self): + """ + Make sure we close only PR's by the correct author. + """ + + incorrect_pr_one = Mock() + incorrect_pr_one.user.name = "Not John" + incorrect_pr_one.user.login = "notJohn" + incorrect_pr_one.number = 1 + incorrect_pr_one.head.ref = "incorrect-branch-name" + incorrect_pr_one.base.ref = "master" + + incorrect_pr_two = Mock() + incorrect_pr_two.user.name = "John Smith" + incorrect_pr_two.user.login = "johnsmithiscool100" + incorrect_pr_two.number = 2 + incorrect_pr_two.head.ref = "incorrect-branch-name-2" + incorrect_pr_two.base.ref = "master" + + incorrect_pr_three = Mock() + incorrect_pr_three.user.name = "John Smith" + incorrect_pr_three.user.login = "fakeuser100" + incorrect_pr_three.number = 5 + incorrect_pr_three.head.ref = "jenkins/upgrade-python-requirements-ce0515e" + incorrect_pr_three.base.ref = "some-other-branch" + + correct_pr_one = Mock() + correct_pr_one.user.name = "John Smith" + correct_pr_one.user.login = "fakeuser100" + correct_pr_one.number = 3 + correct_pr_one.head.ref = "repo-tools/upgrade-python-requirements-ce0515e" + correct_pr_one.base.ref = "master" + + correct_pr_two = Mock() + correct_pr_two.user.name = "John Smith" + correct_pr_two.user.login = "fakeuser100" + correct_pr_two.number = 4 + correct_pr_two.head.ref = "repo-tools/upgrade-python-requirements-0c51f37" + correct_pr_two.base.ref = "master" + + mock_repo = Mock() + mock_repo.get_pulls = MagicMock(return_value=[ + incorrect_pr_one, + incorrect_pr_two, + incorrect_pr_three, + correct_pr_one, + correct_pr_two + ]) + + deleted_pulls = GitHubHelper().close_existing_pull_requests(mock_repo, "fakeuser100", "John Smith") + assert deleted_pulls == [3, 4] + assert not incorrect_pr_one.edit.called + assert not incorrect_pr_two.edit.called + assert not incorrect_pr_three.edit.called + assert correct_pr_one.edit.called + assert correct_pr_two.edit.called + + def test_get_updated_files_list_no_change(self): + git_instance = Mock() + git_instance.ls_files = MagicMock(return_value="") + with patch('edx_repo_tools.pull_request_creator.Git', return_value=git_instance): + result = GitHubHelper().get_updated_files_list("edx-platform") + assert result == [] + + def test_get_updated_files_list_with_changes(self): + git_instance = Mock() + git_instance.ls_files = MagicMock(return_value="file1\nfile2") + with patch('edx_repo_tools.pull_request_creator.Git', return_value=git_instance): + result = 
GitHubHelper().get_updated_files_list("edx-platform") + assert result == ["file1", "file2"] + + def test_update_list_of_files_no_change(self): + repo_mock = Mock() + repo_root = "../../edx-platform" + file_path_list = [] + commit_message = "commit" + sha = "abc123" + username = "fakeusername100" + + return_sha = GitHubHelper().update_list_of_files(repo_mock, repo_root, file_path_list, commit_message, sha, + username) + assert return_sha is None + assert not repo_mock.create_git_tree.called + assert not repo_mock.create_git_commit.called + + @patch('edx_repo_tools.pull_request_creator.GitHubHelper.get_file_contents', + return_value=None) + @patch('edx_repo_tools.pull_request_creator.InputGitAuthor', + return_value=Mock()) + @patch('edx_repo_tools.pull_request_creator.InputGitTreeElement', + return_value=Mock()) + # pylint: disable=unused-argument + def test_update_list_of_files_with_changes(self, get_file_contents_mock, author_mock, git_tree_mock): + repo_mock = Mock() + repo_root = "../../edx-platform" + file_path_list = ["path/to/file1", "path/to/file2"] + commit_message = "commit" + sha = "abc123" + username = "fakeusername100" + + return_sha = GitHubHelper().update_list_of_files(repo_mock, repo_root, file_path_list, commit_message, sha, + username) + assert repo_mock.create_git_tree.called + assert repo_mock.create_git_commit.called + assert return_sha is not None + # pylint: enable=unused-argument + + def test_get_file_contents(self): + with patch("builtins.open", mock_open(read_data="data")) as mock_file: + contents = GitHubHelper().get_file_contents("../../edx-platform", "path/to/file") + mock_file.assert_called_with("../../edx-platform/path/to/file", "r", encoding='utf-8') + assert contents == "data" + + +class UpgradePythonRequirementsPullRequestTestCase(TestCase): + """ + Test Case class for PR creator. + """ + + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.close_existing_pull_requests', + return_value=[]) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_github_instance', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.repo_from_remote', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_updated_files_list', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_current_commit', return_value='1234567') + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.branch_exists', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.update_list_of_files', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_pull_request') + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_branch', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator._get_user', + return_value=Mock(name="fake name", login="fake login")) + @patch('edx_repo_tools.pull_request_creator.GitHubHelper.delete_branch', return_value=None) + def test_no_changes(self, delete_branch_mock, get_user_mock, create_branch_mock, create_pr_mock, + update_files_mock, branch_exists_mock, current_commit_mock, + modified_list_mock, repo_mock, authenticate_mock, + close_existing_prs_mock): + """ + Ensure a merge with no changes to db files will not result in any updates. 
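The update_list_of_files tests above mock out the low-level Git data calls (create_git_tree, create_git_commit, InputGitTreeElement). For orientation, this is a hedged sketch of the flow those mocks stand in for, written against the PyGithub calls the patches name; the helper shape is a simplification of mine, not the repo's implementation:

    from github import InputGitTreeElement

    def commit_files(repo, paths, message, parent_sha, read_file):
        # One blob per changed file, rooted on the parent commit's tree.
        elements = [
            InputGitTreeElement(path=p, mode="100644", type="blob", content=read_file(p))
            for p in paths
        ]
        base_tree = repo.get_git_tree(parent_sha)
        tree = repo.create_git_tree(elements, base_tree)
        parent = repo.get_git_commit(parent_sha)
        return repo.create_git_commit(message, tree, [parent]).sha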
+ """ + pull_request_creator = PullRequestCreator('--repo_root=../../edx-platform', 'upgrade-branch', [], + [], 'Upgrade python requirements', 'Update python requirements', + 'make upgrade PR') + pull_request_creator.create(True) + + assert authenticate_mock.called + assert repo_mock.called + assert modified_list_mock.called + assert not branch_exists_mock.called + assert not create_branch_mock.called + assert not update_files_mock.called + assert not create_pr_mock.called + + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.close_existing_pull_requests', + return_value=[]) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_github_instance', + return_value=Mock()) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.repo_from_remote', return_value=Mock()) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_updated_files_list', + return_value=["requirements/edx/base.txt", "requirements/edx/coverage.txt"]) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_current_commit', return_value='1234567') + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.branch_exists', return_value=False) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.update_list_of_files', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_pull_request') + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_branch', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator._get_user', + return_value=Mock(name="fake name", login="fake login")) + @patch('edx_repo_tools.pull_request_creator.GitHubHelper.delete_branch', return_value=None) + def test_changes(self, delete_branch_mock, get_user_mock, create_branch_mock, create_pr_mock, + update_files_mock, branch_exists_mock, current_commit_mock, + modified_list_mock, repo_mock, authenticate_mock, + close_existing_prs_mock): + """ + Ensure a merge with no changes to db files will not result in any updates. 
+ """ + pull_request_creator = PullRequestCreator('--repo_root=../../edx-platform', 'upgrade-branch', [], + [], 'Upgrade python requirements', 'Update python requirements', + 'make upgrade PR') + pull_request_creator.create(True) + + assert branch_exists_mock.called + assert create_branch_mock.called + self.assertEqual(create_branch_mock.call_count, 1) + assert update_files_mock.called + self.assertEqual(update_files_mock.call_count, 1) + assert create_pr_mock.called + + create_pr_mock.title = "Python Requirements Update" + create_pr_mock.diff_url = "/" + create_pr_mock.repository.name = 'repo-health-data' + + basepath = path.dirname(__file__) + + filepath = path.abspath(path.join(basepath, "pull_request_creator_test_data", "diff.txt")) + with open(filepath, "r") as f: + content = f.read().encode('utf-8') + with patch('requests.get') as mock_request: + mock_request.return_value.content = content + mock_request.return_value.status_code = 200 + GitHubHelper().verify_upgrade_packages(create_pr_mock) + assert create_pr_mock.create_issue_comment.called + assert not delete_branch_mock.called + + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator._get_user', + return_value=Mock(name="fake name", login="fake login")) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_github_instance', + return_value=Mock()) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.repo_from_remote', return_value=Mock()) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_updated_files_list', + return_value=["requirements/edx/base.txt", "requirements/edx/coverage.txt"]) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_current_commit', return_value='1234567') + # all above this unused params, no need to interact with those mocks + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.branch_exists', return_value=False) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.update_list_of_files', return_value=None) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_pull_request') + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_branch', return_value=None) + @patch('builtins.print') + def test_outputs_url_on_success(self, print_mock, create_branch_mock, create_pr_mock, + update_files_mock, branch_exists_mock, *args): + """ + Ensure that a successful run outputs the URL consumable by github actions + """ + pull_request_creator = PullRequestCreator('--repo_root=../../edx-platform', 'upgrade-branch', [], + [], 'Upgrade python requirements', 'Update python requirements', + 'make upgrade PR', output_pr_url_for_github_action=True) + pull_request_creator.create(False) + + assert branch_exists_mock.called + assert create_branch_mock.called + self.assertEqual(create_branch_mock.call_count, 1) + assert update_files_mock.called + self.assertEqual(update_files_mock.call_count, 1) + assert create_pr_mock.called + assert print_mock.call_count == 1 + found_matching_call = False + for call in print_mock.call_args_list: + if '$GITHUB_OUTPUT' in call.args[0]: + found_matching_call = True + assert found_matching_call + + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.close_existing_pull_requests', + return_value=[]) + @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_github_instance', return_value=None) + 
@patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.repo_from_remote', return_value=None)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_updated_files_list',
+ return_value=["requirements/edx/base.txt", "requirements/edx/coverage.txt"])
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_current_commit', return_value='1234567')
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.branch_exists', return_value=True)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.update_list_of_files', return_value=None)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_pull_request')
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_branch', return_value=None)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator._get_user',
+ return_value=Mock(name="fake name", login="fake login"))
+ @patch('edx_repo_tools.pull_request_creator.GitHubHelper.delete_branch', return_value=None)
+ def test_branch_exists(self, delete_branch_mock, get_user_mock, create_branch_mock, create_pr_mock,
+ update_files_mock, branch_exists_mock, current_commit_mock,
+ modified_list_mock, repo_mock, authenticate_mock,
+ close_existing_prs_mock):
+ """
+ Ensure if a branch exists and delete_old_pull_requests is set to False, then there are no updates.
+ """
+ pull_request_creator = PullRequestCreator('--repo_root=../../edx-platform', 'upgrade-branch', [],
+ [], 'Upgrade python requirements', 'Update python requirements',
+ 'make upgrade PR')
+ pull_request_creator.create(False)
+
+ assert branch_exists_mock.called
+ assert not create_branch_mock.called
+ assert not create_pr_mock.called
+ assert not delete_branch_mock.called
+
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.close_existing_pull_requests',
+ return_value=[])
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator._get_user',
+ return_value=Mock(name="fake name", login="fake login"))
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_github_instance',
+ return_value=Mock())
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.repo_from_remote', return_value=Mock())
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_updated_files_list',
+ return_value=["requirements/edx/base.txt", "requirements/edx/coverage.txt"])
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_current_commit', return_value='1234567')
+ # The patches above this line supply unused params; the test never interacts with those mocks.
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.branch_exists', return_value=True)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.update_list_of_files', return_value=None)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_pull_request')
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_branch', return_value=None)
+ @patch('edx_repo_tools.pull_request_creator.GitHubHelper.delete_branch', return_value=None)
+ @patch('builtins.print')
+ def test_branch_deletion(self, print_mock, delete_branch_mock, create_branch_mock, create_pr_mock,
+ update_files_mock, branch_exists_mock, *args):
+ """
+ Ensure if a branch exists and delete_old_pull_requests is set, then the branch is deleted
+ before creating the new PR.
+ """ + pull_request_creator = PullRequestCreator('--repo_root=../../edx-platform', 'upgrade-branch', [], + [], 'Upgrade python requirements', 'Update python requirements', + 'make upgrade PR', output_pr_url_for_github_action=True) + pull_request_creator.create(True) + + assert branch_exists_mock.called + assert delete_branch_mock.called + assert create_branch_mock.called + assert update_files_mock.called + assert create_pr_mock.called + + def test_compare_upgrade_difference_with_major_changes(self): + basepath = path.dirname(__file__) + filepath = path.abspath(path.join(basepath, "pull_request_creator_test_data", "diff.txt")) + with open(filepath, "r") as f: + valid, suspicious = GitHubHelper().compare_pr_differnce(f.read()) + assert sorted( + ['certifi', 'chardet', 'filelock', 'pip-tools', 'platformdirs', 'pylint', 'virtualenv'] + ) == [g['name'] for g in valid] + + assert sorted( + ['cachetools', 'six', 'tox', 'pyproject-api', 'colorama', 'py', 'chardet', 'pyparsing', 'packaging'] + ) == [g['name'] for g in suspicious] + + def test_compare_upgrade_difference_with_minor_changes(self): + basepath = path.dirname(__file__) + filepath = path.abspath(path.join(basepath, "pull_request_creator_test_data", "minor_diff.txt")) + with open(filepath, "r") as f: + valid, suspicious = GitHubHelper().compare_pr_differnce(f.read()) + assert sorted( + ['packaging'] + ) == [g['name'] for g in valid] + + assert sorted( + [] + ) == [g['name'] for g in suspicious] + + def test_check_automerge_variable_value(self): + with patch('requests.get') as mock_request: + mock_request.return_value.status_code = 200 + mock_request.return_value.json.return_value = { + 'name': 'ENABLE_AUTOMERGE_FOR_DEPENDENCIES_PRS', 'value': 'True', + 'created_at': '2023-03-17T12:58:50Z', 'updated_at': '2023-03-17T13:01:12Z' + } + self.assertTrue( + GitHubHelper().check_automerge_variable_value( + 'https://foo/bar/testrepo/pulls/1' + ) + ) + + # in case of false value of variable. + mock_request.return_value.json.return_value = { + 'name': 'ENABLE_AUTOMERGE_FOR_DEPENDENCIES_PRS', 'value': 'False', + 'created_at': '2023-03-17T12:58:50Z', 'updated_at': '2023-03-17T13:01:12Z' + } + self.assertFalse( + GitHubHelper().check_automerge_variable_value( + 'https://foo/bar/testrepo/pulls/1' + ) + ) + # in case of no variable exists. 
+ mock_request.return_value.status_code = 404
+ mock_request.return_value.json.return_value = {
+ 'name': 'ENABLE_AUTOMERGE_FOR_DEPENDENCIES_PRS', 'value': 'False',
+ 'created_at': '2023-03-17T12:58:50Z', 'updated_at': '2023-03-17T13:01:12Z'
+ }
+ self.assertFalse(
+ GitHubHelper().check_automerge_variable_value(
+ 'https://foo/bar/testrepo/pulls/1'
+ )
+ )
+
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.close_existing_pull_requests',
+ return_value=[])
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_github_instance',
+ return_value=Mock())
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.repo_from_remote', return_value=Mock())
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_updated_files_list',
+ return_value=["requirements/edx/base.txt", "requirements/edx/coverage.txt"])
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.get_current_commit', return_value='1234567')
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.branch_exists', return_value=False)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.update_list_of_files', return_value=None)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_pull_request')
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator.github_helper.create_branch', return_value=None)
+ @patch('edx_repo_tools.pull_request_creator.PullRequestCreator._get_user',
+ return_value=Mock(name="fake name", login="fake login"))
+ @patch('edx_repo_tools.pull_request_creator.GitHubHelper.delete_branch', return_value=None)
+ def test_changes_with_minor_versions_and_variable(
+ self, delete_branch_mock, get_user_mock, create_branch_mock, create_pr_mock,
+ update_files_mock, branch_exists_mock, current_commit_mock,
+ modified_list_mock, repo_mock, authenticate_mock,
+ close_existing_prs_mock
+ ):
+ """
+ Ensure a PR with only minor version upgrades is created, and that the automerge label is only added
+ when the repository variable allows it.
+ """
+ pull_request_creator = PullRequestCreator('--repo_root=../../edx-platform', 'upgrade-branch', [],
+ [], 'Upgrade python requirements', 'Update python requirements',
+ 'make upgrade PR')
+ pull_request_creator.create(True)
+
+ assert branch_exists_mock.called
+ assert create_branch_mock.called
+ self.assertEqual(create_branch_mock.call_count, 1)
+ assert update_files_mock.called
+ self.assertEqual(update_files_mock.call_count, 1)
+ assert create_pr_mock.called
+
+ create_pr_mock.title = "chore: Upgrade Python requirements"
+ create_pr_mock.diff_url = "/"
+ create_pr_mock.repository.name = 'xblock-lti-consumer'
+
+ basepath = path.dirname(__file__)
+
+ filepath = path.abspath(path.join(basepath, "pull_request_creator_test_data", "minor_diff.txt"))
+ with open(filepath, "r") as f:
+ content = f.read().encode('utf-8')
+ with patch('requests.get') as mock_request:
+ mock_request.return_value.content = content
+ mock_request.return_value.status_code = 200
+
+ # If `check_automerge_variable_value` returns False, the automerge label is not added.
+ with patch( + 'edx_repo_tools.pull_request_creator.GitHubHelper.check_automerge_variable_value' + ) as check_automerge_variable_value: + check_automerge_variable_value.return_value = False + GitHubHelper().verify_upgrade_packages(create_pr_mock) + assert not create_pr_mock.set_labels.called diff --git a/tests/test_repo_checks.py b/tests/test_repo_checks.py index 671d1d27..be1cfbb3 100644 --- a/tests/test_repo_checks.py +++ b/tests/test_repo_checks.py @@ -7,7 +7,7 @@ import pytest -from edx_repo_tools.repo_checks.repo_checks import EnsureLabels +from edx_repo_tools.repo_checks import repo_checks @pytest.fixture @@ -35,12 +35,12 @@ def maintenance_label(): ] -@patch.object(EnsureLabels, "labels", labels_yaml) -class TestEnsureLabels: +@patch.object(repo_checks.Labels, "labels", labels_yaml) +class TestLabelsCheck: def test_check_for_no_change(self, maintenance_label): api = MagicMock() api.issues.list_labels_for_repo.side_effect = [[maintenance_label], None] - check_cls = EnsureLabels(api, "test_org", "test_repo") + check_cls = repo_checks.Labels(api, "test_org", "test_repo") # Make sure that the check returns True, indicating that no changes need to be made. assert check_cls.check()[0] @@ -48,7 +48,7 @@ def test_check_for_no_change(self, maintenance_label): def test_addition(self, maintenance_label): api = MagicMock() api.issues.list_labels_for_repo.return_value = [] - check_cls = EnsureLabels(api, "test_org", "test_repo") + check_cls = repo_checks.Labels(api, "test_org", "test_repo") # The check should be false because the maintenance label should be missing. assert check_cls.check()[0] == False @@ -72,7 +72,7 @@ def test_update_label(self, maintenance_label): api = MagicMock() api.issues.list_labels_for_repo.side_effect = [[maintenance_label], None] - check_cls = EnsureLabels(api, "test_org", "test_repo") + check_cls = repo_checks.Labels(api, "test_org", "test_repo") assert check_cls.check()[0] == False check_cls.fix() diff --git a/tests/test_tag_release.py b/tests/test_tag_release.py index 5562062b..50df71ca 100644 --- a/tests/test_tag_release.py +++ b/tests/test_tag_release.py @@ -88,6 +88,13 @@ def expected_repos(): 'ref': 'master', } }, + mock_repository('edx/edx-package'): { + 'metadata': { + 'annotations': { + 'openedx.org/release': 'master', + } + } + }, mock_repository('edx/XBlock'): { 'openedx-release': { 'parent-repo': 'edx/edx-platform', @@ -122,6 +129,14 @@ def expected_commits(): 'ref': 'master', 'message': 'commit message for configuration master commit', }, + mock_repository('edx/edx-package'): { + 'committer': {'name': 'Dev 2'}, + 'author': {'name': 'Dev 2'}, + 'sha': '12345deadbeef', + 'ref_type': 'branch', + 'ref': 'master', + 'message': 'commit message for configuration master commit', + }, mock_repository('edx/XBlock'): { 'committer': {'name': 'Dev 1'}, 'author': {'name': 'Dev 1'}, @@ -159,6 +174,7 @@ def test_get_ref_for_repos_not_exist(): repos = [ mock_repository('edx/edx-platform'), mock_repository('edx/configuration'), + mock_repository('edx/edx-package'), mock_repository('edx/XBlock'), ] for repo in repos: @@ -172,16 +188,20 @@ def test_commit_ref_info(expected_repos): edx_edx_platform = find_repo(expected_repos, 'edx/edx-platform') edx_configuration = find_repo(expected_repos, 'edx/configuration') + edx_edx_package = find_repo(expected_repos, 'edx/edx-package') edx_edx_platform.branch.assert_called_once_with('release') assert not edx_edx_platform.ref.called edx_configuration.branch.assert_called_once_with('master') assert not edx_configuration.ref.called + 
edx_edx_package.branch.assert_called_once_with('master') + assert not edx_edx_package.ref.called # The XBlock repo shouldn't have been examined at all. assert not find_repo(expected_repos, 'edx/XBlock').branch.called edx_platform_commit = edx_edx_platform.git_commit().refresh() configuration_commit = edx_configuration.git_commit().refresh() + edx_edx_package_commit = edx_edx_package.git_commit().refresh() expected_commits = { edx_edx_platform: { @@ -200,6 +220,14 @@ def test_commit_ref_info(expected_repos): 'ref': 'master', 'message': configuration_commit.message, }, + edx_edx_package: { + 'committer': edx_edx_package_commit.committer, + 'author': edx_edx_package_commit.author, + 'sha': edx_edx_package_commit.sha, + 'ref_type': 'branch', + 'ref': 'master', + 'message': edx_edx_package_commit.message, + }, } assert result == expected_commits @@ -214,6 +242,7 @@ def test_overrides_global_ref(expected_repos): result = override_repo_refs(expected_repos, override_ref="abcdef") assert find_repo_data(result, "edx/edx-platform")["openedx-release"]["ref"] == "abcdef" assert find_repo_data(result, "edx/configuration")["openedx-release"]["ref"] == "abcdef" + assert find_repo_data(result, "edx/edx-package")["metadata"]["annotations"]["openedx.org/release"] == "abcdef" def test_overrides_dict(expected_repos): @@ -221,10 +250,12 @@ def test_overrides_dict(expected_repos): "edx/edx-platform": "xyz", "edx/configuration": "refs/branch/no-way", "edx/does-not-exist": "does-not-matter", + "edx/edx-package": "release" } result = override_repo_refs(expected_repos, overrides=overrides) assert find_repo_data(result, "edx/edx-platform")["openedx-release"]["ref"] == "xyz" assert find_repo_data(result, "edx/configuration")["openedx-release"]["ref"] == "refs/branch/no-way" + assert find_repo_data(result, "edx/edx-package")["metadata"]["annotations"]["openedx.org/release"] == "release" def test_overrides_global_ref_and_dict(expected_repos): @@ -240,6 +271,7 @@ def test_overrides_global_ref_and_dict(expected_repos): ) assert find_repo_data(result, "edx/edx-platform")["openedx-release"]["ref"] == "xyz" assert find_repo_data(result, "edx/configuration")["openedx-release"]["ref"] == "fakie-mcfakerson" + assert find_repo_data(result, "edx/edx-package")["metadata"]["annotations"]["openedx.org/release"] == "fakie-mcfakerson" def test_create_happy_path(expected_commits): diff --git a/tests/test_tox_modernizer.py b/tests/test_tox_modernizer.py index c340bb56..6d506e5e 100644 --- a/tests/test_tox_modernizer.py +++ b/tests/test_tox_modernizer.py @@ -5,7 +5,7 @@ from unittest import TestCase import shutil import uuid -from edx_repo_tools.codemods.django3 import ConfigReader +from edx_repo_tools.codemods.python312 import ConfigReader class TestToxModernizer(TestCase): @@ -33,27 +33,20 @@ def _assert_django_dependencies_replaced(self, config_file): parser = self._get_parser(config_file) dependencies = parser['testenv']['deps'] - self.assertIn("django32:", dependencies) - self.assertIn("django40:", dependencies) + self.assertNotIn("django32:", dependencies) + self.assertIn("django42:", dependencies) def _assert_replaces_python_interpreters(self, config_file): parser = self._get_parser(config_file) env_list = parser['tox']['envlist'] - self.assertNotRegex("py{27}", env_list) - self.assertNotIn("py{27,35}", env_list) - self.assertNotIn("py{27,35,36}", env_list) - self.assertNotIn("py{27,35,36,37}", env_list) - self.assertIn("py38", env_list) + self.assertIn("py{38, 312}", env_list) def _assert_replaces_django_runners(self, 
config_file): parser = self._get_parser(config_file) env_list = parser['tox']['envlist'] - self.assertNotIn("django{111}", env_list) - self.assertNotIn("django{111,20}", env_list) - self.assertNotIn("django{111,20,21}", env_list) - self.assertIn("django{32,40}", env_list) + self.assertIn("django{42}", env_list) def _assert_replaces_django_dependencies(self, config_file): self._assert_django_dependencies_replaced(config_file) @@ -63,7 +56,7 @@ def _assert_adds_django_dependencies(self, config_file): parser.read(config_file) dependencies = parser['testenv']['deps'] - dependencies = re.sub("[^\n]*django32.*\n", '', dependencies) + dependencies = re.sub("[^\n]*django42.*\n", '', dependencies) parser['testenv']['deps'] = dependencies with open(config_file, 'w') as configfile:
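To close out the tox modernizer changes: the assertions above pin the rewritten envlist to py{38, 312} and django{42}. As a compact illustration of the transformation (a string-level simplification; the real ConfigReader-based modernizer rewrites the whole ini file), consider:

    import re

    def modernize_envlist(envlist):
        envlist = re.sub(r"py\{[^}]*\}", "py{38, 312}", envlist)
        return re.sub(r"django\{[^}]*\}", "django{42}", envlist)

    assert modernize_envlist("py{27,35,36,37}-django{111,20,21,22}-drf{39,310,latest}") \
        == "py{38, 312}-django{42}-drf{39,310,latest}"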