diff --git a/README.md b/README.md
index a05bd4e..88cd194 100644
--- a/README.md
+++ b/README.md
@@ -86,7 +86,9 @@ creation) is skipped in non-interactive mode.**
 3. Write commit messages conforming to the
    [Conventional Commits](https://www.conventionalcommits.org) specification, and
    maintain a linear commit history.
 4. [Trigger](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow)
-   the `release-new-version` workflow to create a new release.
+   the `release-new-version` workflow to create a new release. If the
+   project was created with GitHub support disabled, use `scripts/release_new_version.py`
+   instead; run `scripts/release_new_version.py -h` to see the available options.
 5. If working on a new clone of the repository, initialize the project environment
    by running:
@@ -166,6 +168,9 @@ specifying them are:
     Dependencies should be separated by ';', and follow
     [poetry specifications](https://python-poetry.org/docs/dependency-specification/).
 13. `--add-dev-deps`: Same as `--add-deps`, except the dependencies are added to the
     'dev' group.
+14. `--no-github`: Disable GitHub support. This will omit adding any
+    GitHub-related files to the project, and will skip GitHub setup in
+    interactive mode. It has no effect in barebones mode.
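As a quick illustration of the option documented above, here is a hedged example of seeding a project without GitHub support. Only `--no-github`, `--add-deps`, and `--add-dev-deps` appear in this patch, so the script path and any other arguments are assumptions, not part of the change:

```sh
# Hypothetical invocation; consult the seeding script's -h output for the real interface.
./dist/pyseed.py --no-github \
    --add-deps "requests;flask>=2.0,<3.0" \
    --add-dev-deps "black"
```
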
("update-pre-commit-hooks.yml", update_pc_hooks_workflow), + ]: + vwritetext(gh_workflows_dir / fname, fdata) + scripts_dir = Path("scripts") main_pkg_dir = Path("src") / config[ConfigKey.main_pkg] tests_dir = Path("tests") www_dir = Path("www") for directory in [ - gh_workflows_dir, scripts_dir, main_pkg_dir, tests_dir, @@ -755,6 +774,25 @@ def init_project(config: dict[ConfigKey, Any]): vprint(f"+ MKDIR {project_path / directory}", file=sys.stderr) (project_path / directory).mkdir(parents=True) + script_files_data = [ + (scripts_dir / "gen_site_usage_pages.py", GEN_SITE_USAGE_PAGES_SCRIPT), + (scripts_dir / "make_docs.py", MAKE_DOCS_SCRIPT), + ] + if config[ConfigKey.no_github]: + script_files_data.append( + (scripts_dir / "release_new_version.py", RELEASE_NEW_VERSION_SCRIPT) + ) + else: + script_files_data.extend( + [ + ( + scripts_dir / "commit_and_tag_version.py", + COMMIT_AND_TAG_VERSION_SCRIPT, + ), + (scripts_dir / "verify_pr_commits.py", VERIFY_PR_COMMITS_SCRIPT), + ] + ) + for fpath, fdata in [ ("README.md", readme), (".commitlintrc.yaml", COMMITLINT_RC), @@ -765,14 +803,7 @@ def init_project(config: dict[ConfigKey, Any]): (".pre-commit-config.yaml", pre_commit_config), (".prettierignore", PRETTIER_IGNORE), (".prettierrc.js", PRETTIER_RC), - (gh_workflows_dir / "check-pr.yml", CHECK_PR_WORKFLOW), - (gh_workflows_dir / "release-new-version.yml", RELEASE_NEW_VERSION_WORKFLOW), - (gh_workflows_dir / "run-tests.yml", run_tests_workflow), - (gh_workflows_dir / "update-pre-commit-hooks.yml", update_pc_hooks_workflow), - (scripts_dir / "commit_and_tag_version.py", COMMIT_AND_TAG_VERSION_SCRIPT), - (scripts_dir / "gen_site_usage_pages.py", GEN_SITE_USAGE_PAGES_SCRIPT), - (scripts_dir / "make_docs.py", MAKE_DOCS_SCRIPT), - (scripts_dir / "verify_pr_commits.py", VERIFY_PR_COMMITS_SCRIPT), + *script_files_data, (main_pkg_dir / "__init__.py", INIT_PY), (main_pkg_dir / "_version.py", VERSION_PY), (www_dir / "theme" / "overrides" / "main.html", THEME_OVERRIDE_MAIN), @@ -1020,7 +1051,11 @@ def main(): init_project(config) create_project(config) project_created = True - if config_mode == ConfigMode.non_interactive or config[ConfigKey.barebones]: + if ( + config_mode == ConfigMode.non_interactive + or config[ConfigKey.barebones] + or config[ConfigKey.no_github] + ): return do_setup_github = get_yes_no_input( @@ -1947,6 +1982,128 @@ def main(): """ +RELEASE_NEW_VERSION_SCRIPT = r"""#!/usr/bin/env python3 + +import os +import subprocess +import sys +from argparse import ArgumentParser, BooleanOptionalAction, RawTextHelpFormatter +from getpass import getpass + + +####################################################################### +# ARGUMENT PARSING + +arg_parser = ArgumentParser(formatter_class=RawTextHelpFormatter) +arg_parser.add_argument( + "-f", + "--first-release", + action="store_true", + help="Create the first release. If version is not specified,\n" + "it will be set to '1.0.0'. No changelog will be generated.", +) +arg_parser.add_argument( + "-r", + "--release-version", + type=str, + metavar="VERSION", + help="Release as the provided version. Should be a valid semvar\n" + "version, or one of 'major', 'minor', or 'patch'. If not\n" + "provided, version is determined automatically from commits\n" + "since the previous release.", +) +arg_parser.add_argument( + "-p", + "--pre-release", + action="store_true", + help="Make a pre-release. 
+    help="Make a pre-release. If a custom version is specified, or a first\n"
+    "release is being made, a pre-release tag must also be provided,\n"
+    "or the custom version should be of the form\n"
+    "'<major>.<minor>.<patch>-<tag>'.",
+)
+arg_parser.add_argument(
+    "-t",
+    "--pre-release-tag",
+    type=str,
+    metavar="TAG",
+    help="Use provided tag for pre-release. This only has an effect\n"
+    "if making a pre-release, and will create a release with version\n"
+    "'<major>.<minor>.<patch>-<tag>-<n>'.",
+)
+arg_parser.add_argument(
+    "--git-push",
+    action=BooleanOptionalAction,
+    help="Whether to run `git push` after creating the release commit.\n"
+    "True by default.",
+    default=True,
+)
+arg_parser.add_argument(
+    "--pypi-publish",
+    action=BooleanOptionalAction,
+    help="Whether to publish the project to PyPI. Requires an access token.\n"
+    "The token can be provided with an environment variable named PYPI_TOKEN.\n"
+    "If this is not available, the user is prompted for it. True by default.",
+    default=True,
+)
+arg_parser.add_argument(
+    "--dry-run", action="store_true", help="Only show what commands will be executed."
+)
+args = arg_parser.parse_args()
+
+
+#######################################################################
+# CALL npx commit-and-tag-version
+
+commit_and_tag_cmd = ["npx", "commit-and-tag-version"]
+
+if args.first_release:
+    commit_and_tag_cmd.append("--skip.changelog")
+    commit_and_tag_cmd.append("--skip.commit")
+
+if args.release_version:
+    commit_and_tag_cmd.extend(["-r", args.release_version])
+elif args.first_release:
+    commit_and_tag_cmd.extend(["-r", "1.0.0"])
+
+if args.pre_release:
+    commit_and_tag_cmd.append("-p")
+    if args.pre_release_tag:
+        commit_and_tag_cmd.append(args.pre_release_tag)
+
+if args.dry_run:
+    commit_and_tag_cmd.append("--dry-run")
+
+print(f"+ {' '.join(commit_and_tag_cmd)}", file=sys.stderr)
+subprocess.run(commit_and_tag_cmd, check=True)
+
+
+#######################################################################
+# CALL git push
+
+if args.git_push:
+    push_cmd = ["git", "push", "--follow-tags", "origin", "master"]
+    print(f"+ {' '.join(push_cmd)}", file=sys.stderr)
+    if not args.dry_run:
+        subprocess.run(push_cmd, check=True)
+
+
+#######################################################################
+# CALL poetry publish
+
+if args.pypi_publish:
+    pypi_publish_cmd = ["poetry", "publish", "-u", "__token__", "-p", "PYPI_TOKEN"]
+    print(f"+ {' '.join(pypi_publish_cmd)}", file=sys.stderr)
+    if not args.dry_run:
+        if "PYPI_TOKEN" in os.environ:
+            pypi_token = os.environ["PYPI_TOKEN"]
+        else:
+            pypi_token = getpass("PyPI access token: ")
+        pypi_publish_cmd[-1] = pypi_token
+
+        subprocess.run(pypi_publish_cmd, check=True)
+
+"""
+
 INIT_PY = r"""from ._version import __version__
 """
diff --git a/src/data/release_new_version.py b/src/data/release_new_version.py
new file mode 100644
index 0000000..267fe47
--- /dev/null
+++ b/src/data/release_new_version.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python3
+
+import os
+import subprocess
+import sys
+from argparse import ArgumentParser, BooleanOptionalAction, RawTextHelpFormatter
+from getpass import getpass
+
+
+#######################################################################
+# ARGUMENT PARSING
+
+arg_parser = ArgumentParser(formatter_class=RawTextHelpFormatter)
+arg_parser.add_argument(
+    "-f",
+    "--first-release",
+    action="store_true",
+    help="Create the first release. If version is not specified,\n"
+    "it will be set to '1.0.0'. No changelog will be generated.",
+)
+arg_parser.add_argument(
+    "-r",
+    "--release-version",
+    type=str,
+    metavar="VERSION",
+    help="Release as the provided version. Should be a valid semver\n"
+    "version, or one of 'major', 'minor', or 'patch'. If not\n"
+    "provided, version is determined automatically from commits\n"
+    "since the previous release.",
+)
+arg_parser.add_argument(
+    "-p",
+    "--pre-release",
+    action="store_true",
+    help="Make a pre-release. If a custom version is specified, or a first\n"
+    "release is being made, a pre-release tag must also be provided,\n"
+    "or the custom version should be of the form\n"
+    "'<major>.<minor>.<patch>-<tag>'.",
+)
+arg_parser.add_argument(
+    "-t",
+    "--pre-release-tag",
+    type=str,
+    metavar="TAG",
+    help="Use provided tag for pre-release. This only has an effect\n"
+    "if making a pre-release, and will create a release with version\n"
+    "'<major>.<minor>.<patch>-<tag>-<n>'.",
+)
+arg_parser.add_argument(
+    "--git-push",
+    action=BooleanOptionalAction,
+    help="Whether to run `git push` after creating the release commit.\n"
+    "True by default.",
+    default=True,
+)
+arg_parser.add_argument(
+    "--pypi-publish",
+    action=BooleanOptionalAction,
+    help="Whether to publish the project to PyPI. Requires an access token.\n"
+    "The token can be provided with an environment variable named PYPI_TOKEN.\n"
+    "If this is not available, the user is prompted for it. True by default.",
+    default=True,
+)
+arg_parser.add_argument(
+    "--dry-run", action="store_true", help="Only show what commands will be executed."
+)
+args = arg_parser.parse_args()
+
+
+#######################################################################
+# CALL npx commit-and-tag-version
+
+commit_and_tag_cmd = ["npx", "commit-and-tag-version"]
+
+if args.first_release:
+    commit_and_tag_cmd.append("--skip.changelog")
+    commit_and_tag_cmd.append("--skip.commit")
+
+if args.release_version:
+    commit_and_tag_cmd.extend(["-r", args.release_version])
+elif args.first_release:
+    commit_and_tag_cmd.extend(["-r", "1.0.0"])
+
+if args.pre_release:
+    commit_and_tag_cmd.append("-p")
+    if args.pre_release_tag:
+        commit_and_tag_cmd.append(args.pre_release_tag)
+
+if args.dry_run:
+    commit_and_tag_cmd.append("--dry-run")
+
+print(f"+ {' '.join(commit_and_tag_cmd)}", file=sys.stderr)
+subprocess.run(commit_and_tag_cmd, check=True)
+
+
+#######################################################################
+# CALL git push
+
+if args.git_push:
+    push_cmd = ["git", "push", "--follow-tags", "origin", "master"]
+    print(f"+ {' '.join(push_cmd)}", file=sys.stderr)
+    if not args.dry_run:
+        subprocess.run(push_cmd, check=True)
+
+
+#######################################################################
+# CALL poetry publish
+
+if args.pypi_publish:
+    pypi_publish_cmd = ["poetry", "publish", "-u", "__token__", "-p", "PYPI_TOKEN"]
+    print(f"+ {' '.join(pypi_publish_cmd)}", file=sys.stderr)
+    if not args.dry_run:
+        if "PYPI_TOKEN" in os.environ:
+            pypi_token = os.environ["PYPI_TOKEN"]
+        else:
+            pypi_token = getpass("PyPI access token: ")
+        pypi_publish_cmd[-1] = pypi_token
+
+        subprocess.run(pypi_publish_cmd, check=True)
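Before the same script is wired into `src/pyseed.py` below, here is a short sketch of how the generated `scripts/release_new_version.py` is meant to be driven. The flags come from the argument parser above; the token value and the choice of commands are illustrative assumptions:

```sh
# Preview the commands a release would run, without committing, tagging, or publishing.
./scripts/release_new_version.py --dry-run

# Cut the first release (tagged v1.0.0) but skip publishing to PyPI.
./scripts/release_new_version.py -f --no-pypi-publish

# Cut a regular release and publish, passing the PyPI token via the
# environment instead of the interactive prompt (the token value is a placeholder).
PYPI_TOKEN="pypi-..." ./scripts/release_new_version.py
```
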
diff --git a/src/pyseed.py b/src/pyseed.py
index 67e77ad..73f343b 100644
--- a/src/pyseed.py
+++ b/src/pyseed.py
@@ -519,12 +519,18 @@ class ConfigKey(Enum):
         "additional python dev dependencies to install (semicolon separated)",
         "",
     )
+    no_github = BoolConfigKeySpec(
+        "no_github",
+        "disable github support by not including any github related files",
+        False,
+    )


 BAREBONES_MODE_IGNORED_CONFIG_KEYS = [
     ConfigKey.url,
     ConfigKey.max_py_version,
     ConfigKey.update_pc_hooks_on_schedule,
+    ConfigKey.no_github,
 ]


@@ -720,30 +726,43 @@ def init_project(config: dict[ConfigKey, Any]):
         vtouch(project_path / "project-words.txt")
         return

-    min_py_minor_version = int(config[ConfigKey.min_py_version].split(".")[1])
-    max_py_minor_version = int(config[ConfigKey.max_py_version].split(".")[1])
-    py_minor_versions = range(min_py_minor_version, max_py_minor_version + 1)
-    py_version_strs = [f'"3.{minor_version}"' for minor_version in py_minor_versions]
-    run_tests_workflow = RUN_TESTS_WORKFLOW_TEMPLATE.format(
-        python_versions=", ".join(py_version_strs)
-    )
+    if not config[ConfigKey.no_github]:
+        min_py_minor_version = int(config[ConfigKey.min_py_version].split(".")[1])
+        max_py_minor_version = int(config[ConfigKey.max_py_version].split(".")[1])
+        py_minor_versions = range(min_py_minor_version, max_py_minor_version + 1)
+        py_version_strs = [
+            f'"3.{minor_version}"' for minor_version in py_minor_versions
+        ]
+        run_tests_workflow = RUN_TESTS_WORKFLOW_TEMPLATE.format(
+            python_versions=", ".join(py_version_strs)
+        )

-    update_pc_hooks_workflow = UPDATE_PRE_COMMIT_HOOKS_WORKFLOW_TEMPLATE.format(
-        schedule=(
-            ' schedule:\n - cron: "0 0 1 * *"\n'
-            if config[ConfigKey.update_pc_hooks_on_schedule]
-            else ""
+        update_pc_hooks_workflow = UPDATE_PRE_COMMIT_HOOKS_WORKFLOW_TEMPLATE.format(
+            schedule=(
+                ' schedule:\n - cron: "0 0 1 * *"\n'
+                if config[ConfigKey.update_pc_hooks_on_schedule]
+                else ""
+            )
         )
-    )

-    gh_workflows_dir = Path(".github") / "workflows"
+        gh_workflows_dir = project_path / ".github" / "workflows"
+        vprint(f"+ MKDIR {gh_workflows_dir}", file=sys.stderr)
+        gh_workflows_dir.mkdir(parents=True)
+
+        for fname, fdata in [
+            ("check-pr.yml", CHECK_PR_WORKFLOW),
+            ("release-new-version.yml", RELEASE_NEW_VERSION_WORKFLOW),
+            ("run-tests.yml", run_tests_workflow),
+            ("update-pre-commit-hooks.yml", update_pc_hooks_workflow),
+        ]:
+            vwritetext(gh_workflows_dir / fname, fdata)
+
     scripts_dir = Path("scripts")
     main_pkg_dir = Path("src") / config[ConfigKey.main_pkg]
     tests_dir = Path("tests")
     www_dir = Path("www")
     for directory in [
-        gh_workflows_dir,
         scripts_dir,
         main_pkg_dir,
         tests_dir,
@@ -753,6 +772,25 @@ def init_project(config: dict[ConfigKey, Any]):
         vprint(f"+ MKDIR {project_path / directory}", file=sys.stderr)
         (project_path / directory).mkdir(parents=True)

+    script_files_data = [
+        (scripts_dir / "gen_site_usage_pages.py", GEN_SITE_USAGE_PAGES_SCRIPT),
+        (scripts_dir / "make_docs.py", MAKE_DOCS_SCRIPT),
+    ]
+    if config[ConfigKey.no_github]:
+        script_files_data.append(
+            (scripts_dir / "release_new_version.py", RELEASE_NEW_VERSION_SCRIPT)
+        )
+    else:
+        script_files_data.extend(
+            [
+                (
+                    scripts_dir / "commit_and_tag_version.py",
+                    COMMIT_AND_TAG_VERSION_SCRIPT,
+                ),
+                (scripts_dir / "verify_pr_commits.py", VERIFY_PR_COMMITS_SCRIPT),
+            ]
+        )
+
     for fpath, fdata in [
         ("README.md", readme),
         (".commitlintrc.yaml", COMMITLINT_RC),
@@ -763,14 +801,7 @@ def init_project(config: dict[ConfigKey, Any]):
         (".pre-commit-config.yaml", pre_commit_config),
         (".prettierignore", PRETTIER_IGNORE),
         (".prettierrc.js", PRETTIER_RC),
-        (gh_workflows_dir / "check-pr.yml", CHECK_PR_WORKFLOW),
-        (gh_workflows_dir / "release-new-version.yml", RELEASE_NEW_VERSION_WORKFLOW),
-        (gh_workflows_dir / "run-tests.yml", run_tests_workflow),
-        (gh_workflows_dir / "update-pre-commit-hooks.yml", update_pc_hooks_workflow),
-        (scripts_dir / "commit_and_tag_version.py", COMMIT_AND_TAG_VERSION_SCRIPT),
-        (scripts_dir / "gen_site_usage_pages.py", GEN_SITE_USAGE_PAGES_SCRIPT),
(scripts_dir / "make_docs.py", MAKE_DOCS_SCRIPT), - (scripts_dir / "verify_pr_commits.py", VERIFY_PR_COMMITS_SCRIPT), + *script_files_data, (main_pkg_dir / "__init__.py", INIT_PY), (main_pkg_dir / "_version.py", VERSION_PY), (www_dir / "theme" / "overrides" / "main.html", THEME_OVERRIDE_MAIN), @@ -1018,7 +1049,11 @@ def main(): init_project(config) create_project(config) project_created = True - if config_mode == ConfigMode.non_interactive or config[ConfigKey.barebones]: + if ( + config_mode == ConfigMode.non_interactive + or config[ConfigKey.barebones] + or config[ConfigKey.no_github] + ): return do_setup_github = get_yes_no_input( @@ -1118,6 +1153,9 @@ def main(): VERIFY_PR_COMMITS_SCRIPT = r"""!!!verify_pr_commits_script.py!!! """ +RELEASE_NEW_VERSION_SCRIPT = r"""!!!release_new_version.py!!! +""" + INIT_PY = r"""from ._version import __version__ """ diff --git a/tests/test_pyseed.py b/tests/test_pyseed.py index 9ee5c88..f520edf 100644 --- a/tests/test_pyseed.py +++ b/tests/test_pyseed.py @@ -728,6 +728,7 @@ def setUp(self) -> None: pyseed.ConfigKey.update_pc_hooks_on_schedule: False, pyseed.ConfigKey.add_deps: "", pyseed.ConfigKey.add_dev_deps: "", + pyseed.ConfigKey.no_github: False, } def tearDown(self): @@ -737,55 +738,78 @@ def tearDown(self): class TestInitProject(_BaseTestCreateProject): def test_init_project_writes_all_expected_files(self): - pyseed.init_project(self.config) - project_dir = Path(self.tempdir.name) / self.project_name - - self.assertTrue(project_dir.exists()) - for fname in [ - ".github/workflows/check-pr.yml", - ".github/workflows/release-new-version.yml", - ".github/workflows/run-tests.yml", - ".github/workflows/update-pre-commit-hooks.yml", - "scripts/commit_and_tag_version.py", - "scripts/gen_site_usage_pages.py", - "scripts/make_docs.py", - "scripts/verify_pr_commits.py", - "src/test_project/__init__.py", - "src/test_project/_version.py", - "src/test_project/py.typed", - "tests/__init__.py", - "www/theme/overrides/main.html", - ".commitlintrc.yaml", - ".cspell.json", - ".editorconfig", - ".gitattributes", - ".gitignore", - ".pre-commit-config.yaml", - ".prettierignore", - ".prettierrc.js", - "CHANGELOG.md", - "LICENSE.md", - "README.md", - "mkdocs.yml", - "project-words.txt", - "pyproject.toml", - ]: - with self.subTest(fname): - self.assertTrue((project_dir / fname).exists()) + for i, no_github in enumerate([False, True]): + if i != 0: + self.tearDown() + self.setUp() + + self.config[pyseed.ConfigKey.no_github] = no_github + + pyseed.init_project(self.config) + project_dir = Path(self.tempdir.name) / self.project_name + + self.assertTrue(project_dir.exists()) + check_exist_fnames = [ + "scripts/gen_site_usage_pages.py", + "scripts/make_docs.py", + "src/test_project/__init__.py", + "src/test_project/_version.py", + "src/test_project/py.typed", + "tests/__init__.py", + "www/theme/overrides/main.html", + ".commitlintrc.yaml", + ".cspell.json", + ".editorconfig", + ".gitattributes", + ".gitignore", + ".pre-commit-config.yaml", + ".prettierignore", + ".prettierrc.js", + "CHANGELOG.md", + "LICENSE.md", + "README.md", + "mkdocs.yml", + "project-words.txt", + "pyproject.toml", + ] + no_github_fnames = ["scripts/release_new_version.py"] + yes_github_fnames = [ + "scripts/commit_and_tag_version.py", + "scripts/verify_pr_commits.py", + ".github/workflows/check-pr.yml", + ".github/workflows/release-new-version.yml", + ".github/workflows/run-tests.yml", + ".github/workflows/update-pre-commit-hooks.yml", + ] + if no_github: + check_exist_fnames.extend(no_github_fnames) + 
+                check_noexist_fnames = yes_github_fnames
+            else:
+                check_exist_fnames.extend(yes_github_fnames)
+                check_noexist_fnames = no_github_fnames
+
+            for fname in check_exist_fnames:
+                with self.subTest(no_github=no_github, should_exist=fname):
+                    self.assertTrue((project_dir / fname).exists())
+            for fname in check_noexist_fnames:
+                with self.subTest(no_github=no_github, should_not_exist=fname):
+                    self.assertFalse((project_dir / fname).exists())
+
+            www_src_dir = project_dir / "www" / "src"
+            for fname in ["CHANGELOG.md", "LICENSE.md"]:
+                with self.subTest(no_github=no_github, should_exist=fname):
+                    self.assertEqual(
+                        (www_src_dir / fname).resolve(), (project_dir / fname).resolve()
+                    )
+            self.assertEqual(
+                (www_src_dir / "index.md").resolve(),
+                (project_dir / "README.md").resolve(),
+            )

-        www_src_dir = project_dir / "www" / "src"
-        for fname in ["CHANGELOG.md", "LICENSE.md"]:
-            with self.subTest(fname):
-                self.assertEqual(
-                    (www_src_dir / fname).resolve(), (project_dir / fname).resolve()
-                )
-        self.assertEqual(
-            (www_src_dir / "index.md").resolve(), (project_dir / "README.md").resolve()
-        )
-        scripts_dir = project_dir / "scripts"
-        for fpath in scripts_dir.glob("*"):
-            with self.subTest(fpath):
-                self.assertTrue(os.access(fpath, os.X_OK))
+            scripts_dir = project_dir / "scripts"
+            for fpath in scripts_dir.glob("*"):
+                with self.subTest(no_github=no_github, should_exist=fpath):
+                    self.assertTrue(os.access(fpath, os.X_OK))

     def test_init_project_writes_all_expected_files_in_barebones_mode(self):
         self.config[pyseed.ConfigKey.barebones] = True
@@ -997,23 +1021,32 @@ def test_init_project_does_not_add_schedule_to_hooks_workflow_if_disabled(self):
         )


 class TestCreateProject(_BaseTestCreateProject):
     def test_create_project_runs_without_error(self):
-        self.config[pyseed.ConfigKey.add_deps] = "requests;flask>=2.0,<3.0"
-        self.config[pyseed.ConfigKey.add_dev_deps] = "black"
-        pyseed.init_project(self.config)
-        pyseed.create_project(self.config)
-        pdone = pyseed.vrun(
-            ["git", "log", "--max-count=1", "--pretty=format:%s"], capture_output=True
-        )
-        self.assertEqual(pdone.stdout.strip(), "chore: initial commit")
-        pdone = pyseed.vrun(
-            ["poetry", "run", "pip", "list", "--format", "freeze"], capture_output=True
-        )
-        installed_pkgs = {
-            line.split("==")[0].lower() for line in pdone.stdout.splitlines()
-        }
-        for pkg in ["requests", "flask", "black"]:
-            with self.subTest(pkg):
-                self.assertIn(pkg, installed_pkgs)
+        for i, no_github in enumerate([False, True]):
+            if i != 0:
+                self.tearDown()
+                self.setUp()
+
+            self.config[pyseed.ConfigKey.no_github] = no_github
+            self.config[pyseed.ConfigKey.add_deps] = "requests;flask>=2.0,<3.0"
+            self.config[pyseed.ConfigKey.add_dev_deps] = "black"
+            pyseed.init_project(self.config)
+            pyseed.create_project(self.config)
+            pdone = pyseed.vrun(
+                ["git", "log", "--max-count=1", "--pretty=format:%s"],
+                capture_output=True,
+            )
+            with self.subTest(no_github=no_github):
+                self.assertEqual(pdone.stdout.strip(), "chore: initial commit")
+            pdone = pyseed.vrun(
+                ["poetry", "run", "pip", "list", "--format", "freeze"],
+                capture_output=True,
+            )
+            installed_pkgs = {
+                line.split("==")[0].lower() for line in pdone.stdout.splitlines()
+            }
+            for pkg in ["requests", "flask", "black"]:
+                with self.subTest(no_github=no_github, package=pkg):
+                    self.assertIn(pkg, installed_pkgs)

     def test_create_project_runs_without_error_in_barebones_mode(self):
         self.config[pyseed.ConfigKey.barebones] = True
@@ -1040,6 +1073,47 @@ def tearDown(self) -> None:
         super().tearDown()
self.github_api.call(f"repos/{GH_USER}/{self.project_name}", "DELETE") + def test_offline_release_script_pushes_to_github(self): + self.config[pyseed.ConfigKey.no_github] = True + pyseed.init_project(self.config) + pyseed.create_project(self.config) + self.assertTrue(Path(os.getcwd()).resolve(), Path(self.tempdir.name).resolve()) + + repo_creation_response = self.github_api.call( + "user/repos", + "POST", + { + "name": self.project_name, + "description": self.config[pyseed.ConfigKey.description], + "homepage": self.config[pyseed.ConfigKey.url], + }, + ) + try: + repo_origin = repo_creation_response["ssh_url"] + except KeyError: + raise pyseed.GitHubAPI.Error( + f"response:\n{repo_creation_response}" + ) from None + + pyseed.vrun(["git", "remote", "add", "origin", repo_origin]) + pyseed.vrun(["git", "push", "-u", "origin", "master"]) + pyseed.vrun( + [ + "poetry", + "run", + "scripts/release_new_version.py", + "-f", + "--no-pypi-publish", + ] + ) + + pdone = pyseed.vrun( + ["git", "ls-remote", "--tags", "origin"], capture_output=True, check=True + ) + tags_data = pdone.stdout + first_tag = tags_data.splitlines()[0].split("\t")[1] + self.assertEqual(first_tag, "refs/tags/v1.0.0") + def test_setup_github_runs_without_error(self): pyseed.init_project(self.config) pyseed.create_project(self.config) @@ -1141,6 +1215,24 @@ def test_main_does_not_setup_github_in_barebones_mode(self): pyseed.main() self.mock_setup_github.assert_not_called() + def test_main_does_not_setup_github_if_no_github(self): + self.config[pyseed.ConfigKey.no_github] = True + mock_get_conf = MagicMock( + return_value=(pyseed.ConfigMode.interactive, self.config) + ) + with ( + patch.multiple( + "pyseed", + get_conf=mock_get_conf, + init_project=self.mock_init_project, + create_project=self.mock_create_project, + setup_github=self.mock_setup_github, + ), + patch.multiple("sys", stdin=StringIO(initial_value="y"), stdout=StringIO()), + ): + pyseed.main() + self.mock_setup_github.assert_not_called() + def test_main_does_not_clean_up_in_non_interactive_mode(self): self.project_path = Path(self.tempdir.name) / self.project_name mock_get_conf = MagicMock(