diff --git a/src/__init__.py b/src/__init__.py
deleted file mode 100644
index a88d18e..0000000
--- a/src/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""
-Script to release new gtk-rs crates version.
-"""
-
-__version__ = "0.0.1"
-__author__ = "Guillaume Gomez"
-
-
-__all__ = ["consts", "github", "release", "utils", "args"]
diff --git a/src/args.py b/src/args.py
deleted file mode 100644
index 9c03f39..0000000
--- a/src/args.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import getopt
-
-# local imports
-from utils import write_error, write_msg
-import consts
-
-
-def write_help():
-    write_msg("release.py accepts the following options:")
-    write_msg("")
-    write_msg(" * -h | --help : display this message")
-    write_msg(" * -t | --token= : give the github token")
-
-
-class Arguments:
-    def __init__(self):
-        self.token = None
-
-    @staticmethod
-    def parse_arguments(argv):
-        try:
-            opts = getopt.getopt(argv, "ht:m:c:", ["help", "token="])[
-                0
-            ]  # second argument is "args"
-        except getopt.GetoptError:
-            write_help()
-            return None
-
-        instance = Arguments()
-
-        for opt, arg in opts:
-            if opt in ("-h", "--help"):
-                write_help()
-                return None
-            if opt in ("-t", "--token"):
-                instance.token = arg
-            else:
-                write_msg(f'"{opt}": unknown option')
-                write_msg('Use "-h" or "--help" to see help')
-                return None
-        if instance.token is None:
-            # In this case, I guess it's not an issue to not have a github token...
-            write_error("Missing token argument.")
-            return None
-
-        return instance
diff --git a/src/consts.py b/src/consts.py
deleted file mode 100644
index 341813f..0000000
--- a/src/consts.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from datetime import date
-
-
-GH_API_URL = "https://api.github.com"
-GITHUB_URL = "https://github.com"
-GIT_URL = "git@github.com:"
-
-ORGANIZATION = "gtk-rs"
-
-BLOG_REPO = "gtk-rs.github.io"
-
-CORE_RELEASE_DATE = date.fromisoformat("2024-02-04")
-GTK4_RELEASE_DATE = CORE_RELEASE_DATE
-
-REPOSITORIES = [
-    {
-        "name": "gtk-rs-core",
-        "date": CORE_RELEASE_DATE,
-    },
-    {
-        "name": "gtk4-rs",
-        "date": GTK4_RELEASE_DATE,
-    },
-]
diff --git a/src/github.py b/src/github.py
deleted file mode 100644
index fc8b64a..0000000
--- a/src/github.py
+++ /dev/null
@@ -1,307 +0,0 @@
-from datetime import date
-
-# pip3 install requests
-import requests
-
-
-def compare_dates(gh_date, comp_date):
-    if gh_date is None or len(gh_date) < 1:
-        return False
-    gh_date = gh_date.split("T")[0].split("-")
-    year = int(gh_date[0])
-    month = int(gh_date[1])
-    day = int(gh_date[2])
-
-    return date(year, month, day) >= comp_date
-
-
-def get_page_number(url):
-    parts = url.split("?")[-1].split("&")
-    for part in parts:
-        if part.startswith("page="):
-            try:
-                return int(part.split("=")[-1])
-            except Exception:
-                break
-    return 1
-
-
-def get_next_pages_url(link):
-    parts = link.split(",")
-    subs = []
-    for part in parts:
-        subs.append(part.split(";"))
-    next_page_url = ""
-    last_page_url = ""
-    for sub in subs:
-        if len(sub) != 2:
-            continue
-        if sub[1].endswith('"next"'):
-            next_page_url = sub[0][1:-1]
-        elif sub[1].endswith('"last"'):
-            last_page_url = sub[0][1:-1]
-    return next_page_url, last_page_url
-
-
-def filter_data(content, to_return, max_date):
-    total = 0
-    if content.__class__.__name__ == "dict":
-        return 0
-    for pull_request in content:
-        if "closed_at" in pull_request and pull_request["closed_at"] is not None:
-            if compare_dates(pull_request["closed_at"], max_date):
-                to_return.append(pull_request)
-                total += 1
-        elif "updated_at" in pull_request:
-            if compare_dates(pull_request["updated_at"], max_date):
-                to_return.append(pull_request)
-                total += 1
-    return total
-
-
-def get_url_data(url, headers, params):
-    res = requests.get(url, headers=headers, params=params, timeout=30)
-    if res.status_code != 200:
-        if res.status_code == 403:
-            # We reached the rate limit.
-            if (
-                "X-RateLimit-Limit" in res.headers
-                and "X-RateLimit-Remaining" in res.headers
-                and "X-RateLimit-Reset" in res.headers
-            ):
-                limit = res.headers["X-RateLimit-Limit"]
-                remaining = res.headers["X-RateLimit-Remaining"]
-                reset = res.headers["X-RateLimit-Reset"]
-                raise Exception(
-                    "Github rate limit exceeded...\n"
-                    f"X-RateLimit-Limit: {limit}\n"
-                    f"X-RateLimit-Remaining: {remaining}\n"
-                    f"X-RateLimit-Reset: {reset}"
-                )
-        raise Exception(
-            f"Get request failed: '{url}', got: [{res.status_code}]: {str(res.content)}"
-        )
-    return res
-
-
-# This function tries to get as much github data as possible by running
-# "parallel" requests.
-def get_all_contents(
-    url, state=None, max_date=None, token=None, recursive=True, params=None
-):
-    if params is None:
-        params = {}
-    headers = {
-        "User-Agent": "GuillaumeGomez",
-        "Accept": "application/vnd.github.v3+json",
-    }
-    params["per_page"] = 100
-    if state is not None:
-        params["sort"] = "updated"
-        params["state"] = state
-        params["direction"] = "desc"
-    if token is not None:
-        # Authentication to github.
-        headers["Authorization"] = f"token {token}"
-    res = get_url_data(url, headers, params)
-    content = res.json()
-    to_return = []
-    if max_date is not None:
-        if filter_data(content, to_return, max_date) < 100:
-            return to_return
-    else:
-        for line in content:
-            to_return.append(line)
-    if "Link" not in res.headers or not recursive:
-        # If there are no other pages, we can return the current content.
-        return to_return
-
-    header_link = res.headers.get("Link")
-    if header_link is None or len(header_link) < 1:
-        return content
-
-    next_page_url, last_page_url = get_next_pages_url(header_link)
-    if len(last_page_url) < 10 or len(next_page_url) < 10:
-        return to_return
-    next_page = get_page_number(next_page_url)
-    last_page = get_page_number(last_page_url)
-    to_replace = f"page={next_page}"
-
-    while next_page <= last_page:
-        res = get_url_data(
-            next_page_url.replace(f"&{to_replace}", f"&page={next_page}"), headers, None
-        )
-        if res.status_code != 200:
-            break
-        content = res.json()
-        if max_date is not None:
-            if filter_data(content, to_return, max_date) < 100:
-                break
-        else:
-            for line in content:
-                to_return.append(line)
-        next_page += 1
-    return to_return
-
-
-class Github:
-    def __init__(self, token):
-        self.token = token
-
-    def get_pull(self, repo_name, repo_owner, pull_number):
-        return Repository(self, repo_name, repo_owner).get_pull(pull_number)
-
-    def get_pulls(self, repo_name, repo_owner, state, max_date, only_merged=False):
-        return Repository(self, repo_name, repo_owner).get_pulls(
-            state, max_date, only_merged=only_merged
-        )
-
-    def get_organization(self, organization_name):
-        return Organization(self, organization_name)
-
-
-class Organization:
-    def __init__(self, gh_obj, name):
-        self.gh_obj = gh_obj
-        self.name = name
-
-    def get_repositories(self):
-        repos = get_all_contents(
-            f"https://api.github.com/orgs/{self.name}/repos", token=self.gh_obj.token
-        )
-        if repos is None:
-            return []
-        return [
-            Repository(self.gh_obj, repo["name"], repo["owner"]["login"])
-            for repo in repos
-        ]
-
-
-class Repository:
-    def __init__(self, gh_obj, name, owner):
-        self.name = name
-        self.gh_obj = gh_obj
-        self.owner = owner
-
-    def get_pulls(self, state, max_date, only_merged=False):
-        prs = get_all_contents(
-            f"https://api.github.com/repos/{self.owner}/{self.name}/pulls",
-            state,
-            max_date,
-            token=self.gh_obj.token,
-            params={
-                "per_page": 100,
-            },
-        )
-        if prs is None:
-            return []
-        return [
-            self.create_pr_obj(pull_request)
-            for pull_request in prs
-            if (
-                only_merged is False
-                or (
-                    pull_request["merged_at"] is not None
-                    and len(pull_request["merged_at"]) > 0
-                )
-            )
-        ]
-
-    def get_pull(self, pull_number):
-        pull_request = get_all_contents(
-            f"https://api.github.com/repos/{self.owner}/{self.name}/pulls/{pull_number}",
-            "all",
-            None,
-            token=self.gh_obj.token,
-        )
-        return self.create_pr_obj(pull_request, pull_number)
-
-    def get_commits(self, branch, since, until):
-        commits = get_all_contents(
-            f"https://api.github.com/repos/{self.owner}/{self.name}/commits",
-            token=self.gh_obj.token,
-            params={
-                "sha": branch,
-                "since": f"{since.year}-{since.month:02d}-{since.day:02d}T00:00:00Z",
-                "until": f"{until.year}-{until.month:02d}-{until.day:02d}T00:00:00Z",
-            },
-        )
-        if commits is None:
-            return []
-        return [
-            Commit(
-                x["commit"]["author"]["name"],
-                x["commit"]["committer"]["name"],
-                x["sha"],
-                x["commit"]["message"],
-            )
-            for x in commits
-        ]
-
-    def create_pr_obj(self, pull_request, pull_number=None):
-        if pull_request is None:
-            return None
-        if pull_number is None:
-            pull_number = pull_request["number"]
-        return PullRequest(
-            self.gh_obj,
-            self.name,
-            self.owner,
-            pull_number,
-            pull_request["base"]["ref"],
-            pull_request["head"]["ref"],
-            pull_request["head"]["sha"],
-            pull_request["title"],
-            pull_request["user"]["login"],
-            pull_request["state"],
-            pull_request["merged_at"],
-            pull_request["closed_at"],
-        )
-
-
-class Commit:
-    def __init__(self, author, committer, sha, message):
-        self.author = author
-        self.committer = committer
-        self.sha = sha
-        self.message = message
-
-
-# Represent a Github Pull Request.
-class PullRequest:
-    def __init__(
-        self,
-        gh_obj,
-        repo_name,
-        repo_owner,
-        pull_number,
-        target_branch,
-        from_branch,
-        head_commit,
-        title,
-        author,
-        open_state,
-        merged_at,
-        closed_at,
-    ):
-        self.repo_name = repo_name
-        self.gh_obj = gh_obj
-        self.repo_owner = repo_owner
-        self.number = pull_number
-        self.target_branch = target_branch
-        self.from_branch = from_branch
-        self.head_commit = head_commit
-        self.title = title
-        self.author = author
-        self.open_state = open_state
-        self.merged_at = merged_at
-        if self.merged_at is None:
-            self.merged_at = ""
-        self.closed_at = closed_at
-        if self.closed_at is None:
-            self.closed_at = ""
-
-    def get_url(self):
-        return (
-            f"https://github.com/{self.repo_owner}/{self.repo_name}/pull/{self.number}"
-        )
diff --git a/src/release.py b/src/release.py
index eb29110..3071546 100644
--- a/src/release.py
+++ b/src/release.py
@@ -7,15 +7,91 @@
 import sys
 import tempfile
 from os.path import join
+import os
+import requests
+import getopt
+
+REPOSITORIES = [
+    {"name": "gtk4-rs", "start-at": "2023-07-23", "end-at": "2024-02-04"},
+    {"name": "gtk-rs-core", "start-at": "2023-07-23", "end-at": "2024-02-04"},
+]
+
+
+def github_search(token, repo_name, start_date, end_date):
+    query = """
+query {
+
+  search(query: "repo:gtk-rs/{repo_name} is:pr is:closed merged:{start_date}..{end_date} base:master sort:created-desc -author:app/dependabot", type: ISSUE, last: 100) {
+    edges {
+      node {
+        ... on PullRequest {
+          url
+          title
+          mergedAt
+          author {
+            login
+          }
+        }
+      }
+    }
+  }
+}
+""".replace("{repo_name}", repo_name)
+    query = query.replace("{start_date}", start_date)
+    query = query.replace("{end_date}", end_date)
+    headers = {"Authorization": f"Bearer {token}"}
+    request = requests.post(
+        "https://api.github.com/graphql", json={"query": query}, headers=headers
+    )
+    if request.status_code == 200:
+        return request.json()["data"]["search"]["edges"]
+    else:
+        raise Exception(
+            "Query failed to run by returning code of {}. {}".format(
+                request.status_code, query
+            )
+        )
+
 
-# local imports
-import consts
-from args import Arguments
-from github import Github
-from utils import add_to_commit, clone_repo
-from utils import write_error
-from utils import commit, write_msg
-from utils import check_if_up_to_date
+def write_help():
+    print("release.py accepts the following options:")
+    print("")
+    print(" * -h | --help : display this message")
+    print(" * -t | --token= : give the github token")
+
+
+class Arguments:
+    def __init__(self):
+        self.token = None
+
+    @staticmethod
+    def parse_arguments(argv):
+        try:
+            opts = getopt.getopt(argv, "ht:m:c:", ["help", "token="])[
+                0
+            ]  # second argument is "args"
+        except getopt.GetoptError:
+            write_help()
+            return None
+
+        instance = Arguments()
+
+        for opt, arg in opts:
+            if opt in ("-h", "--help"):
+                write_help()
+                return None
+            if opt in ("-t", "--token"):
+                instance.token = arg
+            else:
+                print(f'"{opt}": unknown option')
+                print('Use "-h" or "--help" to see help')
+                return None
+        if instance.token is None:
+            # In this case, I guess it's not an issue to not have a github token...
+            print("Missing token argument.")
+            return None
+
+        return instance
 
 
 @contextmanager
@@ -32,27 +108,27 @@ def temporary_directory():
         raise
 
 
-def write_merged_prs(merged_prs, contributors, repo_url):
+def write_merged_prs(merged_prs, contributors):
     content = ""
-    for merged_pr in reversed(merged_prs):
-        if merged_pr.title.startswith("[release] "):
-            continue
-        if merged_pr.author not in contributors:
-            contributors.append(merged_pr.author)
+    for merged_pr in merged_prs:
+        merged_pr = merged_pr["node"]
+        if merged_pr["author"]["login"] not in contributors:
+            contributors.append(merged_pr["author"]["login"])
         md_content = (
-            merged_pr.title.replace("<", "&lt;")
+            merged_pr["title"]
+            .replace("<", "&lt;")
             .replace(">", "&gt;")
             .replace("[", "\\[")
             .replace("]", "\\]")
            .replace("*", "\\*")
            .replace("_", "\\_")
         )
-        content += f" * [{md_content}]({repo_url}/pull/{merged_pr.number})\n"
+        content += f" * [{md_content}]({merged_pr['url']})\n"
     return content + "\n"
 
 
 def build_blog_post(temp_dir, token):
-    write_msg("=> Building blog post...")
+    print("=> Building blog post...")
 
     author = input("Enter author name: ")
     title = input("Enter title: ")
@@ -73,41 +149,32 @@
 """
 
     contributors = []
-    git = Github(token)
-    oldest_date = None
-
-    for repo in consts.REPOSITORIES:
-        release_date = repo["date"]
+    for repo in REPOSITORIES:
         repo_name = repo["name"]
-        if oldest_date is None or release_date < oldest_date:
-            oldest_date = release_date
-        write_msg(f"Gettings merged PRs from {repo_name}...")
-        merged_prs = git.get_pulls(
-            repo_name, consts.ORGANIZATION, "closed", release_date, only_merged=True
-        )
-        write_msg(f"=> Got {len(merged_prs)} merged PRs")
+        merged_prs = github_search(token, repo_name, repo["start-at"], repo["end-at"])
+        print(f"=> Got {len(merged_prs)} merged PRs")
         if len(merged_prs) > 0:
-            repo_url = f"{consts.GITHUB_URL}/{consts.ORGANIZATION}/{repo_name}"
+            repo_url = f"https://github.com/gtk-rs/{repo_name}"
             content += f"[{repo_name}]({repo_url}):\n\n"
-            content += write_merged_prs(merged_prs, contributors, repo_url)
+            content += write_merged_prs(merged_prs, contributors)
 
-    write_msg("Gettings merged PRs from gir...")
-    merged_prs = git.get_pulls(
-        "gir", consts.ORGANIZATION, "closed", oldest_date, only_merged=True
+    print("Gettings merged PRs from gir...")
+    merged_prs = github_search(
+        token, "gir", REPOSITORIES[0]["start-at"], REPOSITORIES[0]["end-at"]
     )
-    write_msg(f"=> Got {len(merged_prs)} merged PRs")
+    print(f"=> Got {len(merged_prs)} merged PRs")
     if len(merged_prs) > 0:
-        repo_url = f"{consts.GITHUB_URL}/{consts.ORGANIZATION}/gir"
+        repo_url = f"https://github.com/gtk-rs/gir"
         content += f"All this was possible thanks to the [gtk-rs/gir]({repo_url}) project as well:"
         content += "\n\n"
-        content += write_merged_prs(merged_prs, contributors, repo_url)
+        content += write_merged_prs(merged_prs, contributors)
 
     content += "Thanks to all of our contributors for their (awesome!) work on this release:\n\n"
     # Sort contributors list alphabetically with case insensitive.
     contributors = sorted(contributors, key=lambda s: s.casefold())
     content += "\n".join(
         [
-            f" * [@{contributor}]({consts.GITHUB_URL}/{contributor})"
+            f" * [@{contributor}](https://github.com/{contributor})"
            for contributor in contributors
        ]
    )
@@ -115,30 +182,20 @@
 
     current_date = time.strftime("%Y-%m-%d")
     file_name = join(
-        join(temp_dir, consts.BLOG_REPO), f"_posts/{current_date}-new-release.md"
+        join(temp_dir, "gtk-rs.github.io"), f"_posts/{current_date}-new-release.md"
     )
     try:
+        os.makedirs(os.path.dirname(file_name))
         with open(file_name, "w", encoding="utf-8") as outfile:
             outfile.write(content)
-            write_msg(f'New blog post written into "{file_name}".')
-        add_to_commit(consts.BLOG_REPO, temp_dir, [file_name])
-        commit(consts.BLOG_REPO, temp_dir, "Add new blog post")
+            print(f'New blog post written into "{file_name}".')
     except Exception as err:
-        write_error(f"build_blog_post failed: {err}")
-        write_msg(f"\n=> Here is the blog post content:\n{content}\n<=")
-    write_msg("Done!")
-
-
-def clone_website_repo(temp_dir):
-    write_msg("=> Cloning the repositories...")
-    if clone_repo(consts.BLOG_REPO, temp_dir, depth=1) is False:
-        write_error(f'Cannot clone the "{consts.BLOG_REPO}" repository...')
-        return []
-    write_msg("Done!")
+        print(f"build_blog_post failed: {err}")
+        print(f"\n=> Here is the blog post content:\n{content}\n<=")
+    print("Done!")
 
 
 def start(args, temp_dir):
-    clone_website_repo(temp_dir)
     build_blog_post(temp_dir, args.token)
     input(
         "Blog post generated, press ENTER to quit (it'll remove the tmp folder and "
@@ -150,11 +207,9 @@ def main(argv):
     args = Arguments.parse_arguments(argv)
     if args is None:
         sys.exit(1)
-    if check_if_up_to_date() is False:
-        return
-    write_msg("=> Creating temporary directory...")
+    print("=> Creating temporary directory...")
     with temporary_directory() as temp_dir:
-        write_msg(f'Temporary directory created in "{temp_dir}"')
+        print(f'Temporary directory created in "{temp_dir}"')
         start(args, temp_dir)
 
 
diff --git a/src/utils.py b/src/utils.py
deleted file mode 100644
index 8f03b8d..0000000
--- a/src/utils.py
+++ /dev/null
@@ -1,128 +0,0 @@
-from os.path import join
-import subprocess
-import sys
-
-# local import
-import consts
-
-
-def write_error(error_msg):
-    sys.stderr.write(f"{error_msg}\n")
-
-
-def write_msg(msg):
-    sys.stdout.write(f"{msg}\n")
-
-
-def convert_to_string(content):
-    if content.__class__.__name__ == "bytes":
-        return content.decode("utf-8")
-    return content
-
-
-def exec_command(command, timeout=None, show_output=False, cwd=None):
-    if show_output:
-        write_msg(f"Executing command {command} with cwd: {cwd}")
-    child = subprocess.Popen(
-        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd
-    )
-    if timeout is not None:
-        stdout, stderr = child.communicate(timeout=timeout)
-    else:
-        stdout, stderr = child.communicate()
-    if show_output:
-        write_msg(f"== STDOUT == {stdout}")
-        write_msg(f"== STDERR == {stderr}")
-    stdout = convert_to_string(stdout)
-    stderr = convert_to_string(stderr)
-    return (child.returncode == 0, stdout, stderr)
-
-
-def exec_command_and_print_error(command, timeout=None, cwd=None):
-    ret, stdout, stderr = exec_command(command, timeout=timeout, cwd=cwd)
-    if not ret:
-        full_command = " ".join(command)
-        write_error(f'Command "{full_command}" failed:')
-        if len(stdout) > 0:
-            write_error(f"=== STDOUT ===\n{stdout}\n")
-        if len(stderr) > 0:
-            write_error(f"=== STDERR ===\n{stderr}\n")
-    return ret
-
-
-def clone_repo(repo_name, temp_dir, depth=None):
-    repo_url = f"{consts.GIT_URL}/{consts.ORGANIZATION}/{repo_name}.git"
-    target_dir = join(temp_dir, repo_name)
-    try:
-        write_msg(f'=> Cloning "{repo_name}" from "{repo_url}"')
-        command = ["git", "clone", repo_url, target_dir]
-        if depth is not None:
-            command = ["git", "clone", "--depth", str(depth), repo_url, target_dir]
-        ret, stdout, stderr = exec_command(command, timeout=300)
-        if not ret:
-            full_command = " ".join(command)
-            write_error(
-                f'command "{full_command}" failed: ===STDOUT===\n{stdout}\n===STDERR===\n{stderr}'
-            )
-            return False
-        command = ["git", "submodule", "update", "--init"]
-        if not exec_command_and_print_error(command, cwd=target_dir):
-            input("Failed to init submodule... Press ENTER to continue")
-        return True
-    except subprocess.TimeoutExpired:
-        full_command = " ".join(command)
-        write_error(f"command timed out: {full_command}")
-    except Exception as err:
-        full_command = " ".join(command)
-        write_error(f'command "{full_command}" got an exception: {err}')
-    return False
-
-
-def commit(repo_name, temp_dir, commit_msg):
-    repo_path = join(temp_dir, repo_name)
-    command = ["git", "commit", ".", "-m", commit_msg]
-    if not exec_command_and_print_error(command, cwd=repo_path):
-        input("Fix the error and then press ENTER")
-
-
-def add_to_commit(repo_name, temp_dir, files_to_add):
-    repo_path = join(temp_dir, repo_name)
-    command = ["git", "add"]
-    for file in files_to_add:
-        command.append(file)
-    if not exec_command_and_print_error(command, cwd=repo_path):
-        input("Fix the error and then press ENTER")
-
-
-def get_last_commit_hash(repo_path):
-    success, out, _ = exec_command(["git", "rev-parse", "HEAD"], cwd=repo_path)
-    if success is True:
-        return out.strip()
-    return ""
-
-
-def get_repo_last_commit_hash(repo_url):
-    success, out, _ = exec_command(
-        ["git", "ls-remote", repo_url, "HEAD"], show_output=True
-    )
-    if success is True:
-        out = out.split("\n", maxsplit=1)[0].strip()
-        return out.split("\t", maxsplit=1)[0].split(" ", maxsplit=1)[0]
-    return ""
-
-
-def check_if_up_to_date():
-    write_msg("Checking if up-to-date...")
-    remote_repo = "git@github.com:gtk-rs/release.git"
-    last_commit = get_last_commit_hash(".")
-    remote_last_commit = get_repo_last_commit_hash(remote_repo)
-    if last_commit != remote_last_commit:
-        write_msg(
-            f"Remote repository `{remote_repo}` has a different last commit than local: `"
-            f"{remote_last_commit}` != `{last_commit}`"
-        )
-        text = input("Do you want to continue anyway? [y/N] ").strip().lower()
-        if len(text) == 0 or text != "y":
-            write_msg("Ok, leaving then. Don't forget to update!")
-            return False
-    return True