diff --git a/.ci/travis/script.sh b/.ci/travis/script.sh index 4925751..593c61d 100755 --- a/.ci/travis/script.sh +++ b/.ci/travis/script.sh @@ -9,7 +9,7 @@ if [[ $PYLINT == "1" ]]; then # purposes. pylint tests \ --disable missing-docstring,invalid-name \ - --disable protected-access,no-self-use + --disable protected-access,no-self-use,unused-argument fi if [[ $READTHEDOCS == "1" ]]; then diff --git a/.gitignore b/.gitignore index bdbf7e0..48e6cdc 100644 --- a/.gitignore +++ b/.gitignore @@ -12,5 +12,5 @@ /.provision /.ci/vagrant/sshd/files/authorized_keys /pywincffi.ini -*Release* +/Release* *_pywincffi.c diff --git a/dev_requirements.txt b/dev_requirements.txt index 387c49a..f3bc26c 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -8,4 +8,6 @@ sphinx wheel setuptools coverage +requests codecov +PyGithub diff --git a/docs/source/conf.py b/docs/source/conf.py index e51744e..4113977 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -3,7 +3,6 @@ from __future__ import print_function -import ast import os import shutil import sys @@ -32,6 +31,8 @@ # Required so we don't need to have pywincffi installed. sys.path.insert(0, ROOT) +from pywincffi import __version__ + # -- General configuration ------------------------------------------------ @@ -71,21 +72,12 @@ copyright = "2015, Oliver Palmer" author = "Oliver Palmer" -with open(join(ROOT, "setup.py")) as setup_py: - module = ast.parse(setup_py.read()) - -for node in ast.walk(module): - if isinstance(node, ast.keyword) and node.arg == "version": - parsed_version = node.value.s.split(".") - break -else: - raise ValueError("Failed to find `version` keyword") - # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -release = ".".join(parsed_version) -version = ".".join(parsed_version[0:2]) +version = list(map(str, __version__)) +release = ".".join(version) +version = ".".join(version[0:2]) # Delete the existing doc files first so it # sphinx-apidoc is clean every time. diff --git a/pywincffi/__init__.py b/pywincffi/__init__.py index e69de29..3843925 100644 --- a/pywincffi/__init__.py +++ b/pywincffi/__init__.py @@ -0,0 +1,9 @@ +""" +PyWinCFFI +========= + +A core Python package that uses :mod:`cffi` to interact +with Windows APIs. See :ref:`modindex` for more detailed +information. +""" +__version__ = (0, 1, 0) diff --git a/pywincffi/core/__init__.py b/pywincffi/core/__init__.py index 17fc029..d27a767 100644 --- a/pywincffi/core/__init__.py +++ b/pywincffi/core/__init__.py @@ -1,8 +1,7 @@ """ -Core -==== +Core Sub-Package +================ -The core package used internally by pywincffi. This package contains -wrappers for :class:`cffi.api.FFI`, a logger and some basic code used -by the unittests. +The core package is used internally by pywincffi. See the documentation +for each module for more information. """ diff --git a/pywincffi/core/pywincffi.ini b/pywincffi/core/pywincffi.ini index 714960f..56fde39 100644 --- a/pywincffi/core/pywincffi.ini +++ b/pywincffi/core/pywincffi.ini @@ -7,3 +7,7 @@ ; error ; critical log_level=warning + +; Used by release tools to interact with GitHub's +; APIs. 
+github_token= \ No newline at end of file diff --git a/pywincffi/dev/__init__.py b/pywincffi/dev/__init__.py index 170299f..ee7df69 100644 --- a/pywincffi/dev/__init__.py +++ b/pywincffi/dev/__init__.py @@ -1,6 +1,7 @@ """ -Dev -==== +Development Sub-Package +======================= + This package is used for development, testing and release purposes. It does not contain core functionality of pywincffi. """ diff --git a/pywincffi/dev/release.py b/pywincffi/dev/release.py new file mode 100644 index 0000000..48c695d --- /dev/null +++ b/pywincffi/dev/release.py @@ -0,0 +1,441 @@ +""" +Release +======= + +A module for developers which can retrieve information for or +produce a release. +""" + +from __future__ import print_function + +import os +import shutil +import subprocess +import sys +import tempfile +from collections import namedtuple +from errno import EEXIST, ENOENT +from os.path import join, basename, dirname, abspath + +try: + # pylint: disable=import-error,wrong-import-order + from httplib import responses, OK + from StringIO import StringIO +except ImportError: # pragma: no cover + # pylint: disable=import-error,wrong-import-order + from http.client import responses, OK + from io import StringIO + + +import requests +from github import Github +from requests.adapters import HTTPAdapter + +from pywincffi.core.config import config +from pywincffi.core.logger import get_logger + +try: + WindowsError +except NameError: # pragma: no cover + WindowsError = OSError # pylint: disable=redefined-builtin + +REPO_ROOT = dirname(dirname(dirname(abspath(__file__)))) + +logger = get_logger("dev.release") + + +def check_wheel(path): + """ + Runs `wheel unpack` on ``path`` and returns True on success, False + on failure. This is used by :meth:`artifacts` to do some validation + on the downloaded file. + + The intent of this method is to ensure that the file we downloaded + structurally makes sense at a high level. It's possible the file + we downloaded could be corrupt or incomplete and we don't want to + upload a bad file. + + :param str path: + The path to run `wheel unpack` on. + """ + unpack_dir = tempfile.mkdtemp() + + # Try to figure out where the wheel command is. %PATH% itself + # may not be setup correctly so we look in the most obvious places. + wheel_commands = [ + "wheel", + join(dirname(sys.executable), "Scripts", "wheel.exe"), + join(dirname(sys.executable), "bin", "wheel"), + join(dirname(sys.executable), "wheel.exe"), + join(dirname(sys.executable), "wheel") + + ] + for wheelcmd in wheel_commands: + try: + subprocess.check_call( + [wheelcmd, "version"], stdout=subprocess.PIPE) + break + except (OSError, WindowsError) as error: # pragma: no cover + if error.errno == ENOENT: + continue + + logger.error("Failed to execute %s", wheelcmd) + raise + else: # pragma: no cover + raise OSError( + "Failed to locate the `wheel` command. " + "Searched %s." 
% wheel_commands)
+
+    # pylint: disable=undefined-loop-variable
+    command = [wheelcmd, "unpack", path, "--dest", unpack_dir]
+
+    try:
+        subprocess.check_call(
+            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+    except subprocess.CalledProcessError:
+        logger.error("Failed to unpack wheel with %r", " ".join(command))
+        return False
+
+    else:
+        shutil.rmtree(unpack_dir, ignore_errors=True)
+        return True
+
+
+class Session(object):
+    """
+    A class which acts as a provider for other APIs by constructing
+    and sharing a single :class:`requests.Session` instance.
+    """
+    session = requests.Session()
+    session.headers.update({
+        "Accept": "application/json"
+    })
+
+    @classmethod
+    def check_code(cls, response, expected):
+        """
+        Check the HTTP response code from ``response`` against an
+        expected value.
+
+        :param requests.Response response:
+            The response to check the status code for
+
+        :param int expected:
+            The expected http response code
+
+        :raises RuntimeError:
+            Raised if the response's HTTP status code does not
+            match ``expected``
+        """
+        assert isinstance(response, requests.Response)
+
+        if response.status_code != expected:
+            raise RuntimeError(
+                "Expected %s %s for GET %s. Got %s %s instead." % (
+                    expected, responses[expected], response.url,
+                    response.status_code, responses[response.status_code]))
+
+    @classmethod
+    def json(cls, url, expected=OK):
+        """
+        Downloads the requested url and returns the json data.
+
+        :param str url:
+            The url to request.
+
+        :param int expected:
+            The HTTP response code we should expect for 'success'. This
+            is set to 200 by default.
+
+        :raises RuntimeError:
+            Raised if the http response's status code does not
+            equal ``expected``.
+        """
+        logger.debug("GET %s", url)
+        response = cls.session.get(url)
+        cls.check_code(response, expected)
+        return response.json()
+
+    @classmethod
+    def download(cls, url, path=None, chunk_size=1024):
+        """
+        Downloads the data from ``url`` to the requested path or a
+        random path if ``path`` is not provided.
+
+        :param str url:
+            The url to download from
+
+        :keyword str path:
+            The path to download to. A temporary file will be used
+            if a path is not provided.
+
+        :keyword int chunk_size:
+            How large of a chunk to download at once from ``url``
+
+        :return:
+            Returns the path the data from ``url`` was written to.
+        """
+        if path is None:
+            fd, path = tempfile.mkstemp()
+            os.close(fd)
+
+        logger.debug("GET %s -> %s", url, path)
+        response = cls.session.get(url, stream=True)
+        cls.check_code(response, OK)
+
+        with open(path, "wb") as file_:
+            for chunk in response.iter_content(chunk_size=chunk_size):
+                if chunk:
+                    file_.write(chunk)
+
+        return path
+
+
+class GitHubAPI(object):  # pylint: disable=too-many-instance-attributes
+    """
+    A wrapper around the :class:`github.Github` class
+    which provides methods for constructing releases,
+    tags, etc.
+ """ + PROJECT = "pywincffi" + REPO_NAME = "opalmer/%s" % PROJECT + + def __init__(self, version, branch="master"): + self.version = version + self.branch = branch + self.read_the_docs = \ + "https://%s.readthedocs.org/en/%s/" % (self.PROJECT, self.version) + self.pypi_release = \ + "https://pypi.python.org/pypi/%s/%s" % (self.PROJECT, self.version) + self.milestone_filter = \ + "https://github.com/%s/issues?q=milestone:%s" % ( + self.REPO_NAME, self.version + ) + + github_token = config.get("pywincffi", "github_token") + if not github_token: + raise RuntimeError( + "pywincffi.github_token is not set in the config") + + self.hub = Github(login_or_token=github_token) + self.repo = self.hub.get_repo(self.REPO_NAME) + + for milestone in self.repo.get_milestones(): + if milestone.title == self.version: + self.milestone = milestone + break + else: + raise ValueError( + "Failed to locate milestone for version %s" % self.version) + + def commit(self): + """Returns the sha1 of the latest commit for the publish branch""" + branch = self.repo.get_branch(self.branch) + return branch.commit.sha + + def release_message(self): + """Produces release message for :meth:`create_release` to use.""" + output = StringIO() + + print("## External Links", file=output) + print("Links for documentation, release files and other useful " + "information.", file=output) + print("* [Documentation](%s)" % self.read_the_docs, file=output) + print("* [PyPi Package](%s)" % self.pypi_release, file=output) + print("* [GitHub Issues](%s)" % self.milestone_filter, file=output) + print("", file=output) + + print("## Pull Requests and Issues", file=output) + print("Pull requests and issues associated with this release.", + file=output) + print("", file=output) + issues = { + "bugs": [], + "enhancements": [], + "unittests": [], + "documentation": [], + "other": [] + } + for issue in self.repo.get_issues( + milestone=self.milestone, state="all"): + for label in issue.labels: + if label.name == "bug": + issues["bugs"].append(issue) + break + if label.name == "enhancement": + issues["enhancements"].append(issue) + break + if label.name == "documentation": + issues["documentation"].append(issue) + break + if label.name == "unittest": + issues["unittests"].append(issue) + break + else: + issues["other"].append(issue) + + for value in issues.values(): + value.reverse() + + for name in ( + "enhancements", "bugs", "documentation", "unittests", "other"): + if issues[name]: + print("#### %s" % name.title(), file=output) + for issue in issues[name.lower()]: + if issue.state != "closed": + logger.warning("Issue %s is not closed!", issue.number) + print( + "[%s](%s) - %s" % ( + issue.number, issue.url, issue.title), + file=output) + + return output.getvalue() + + def create_release( + self, recreate=False, prerelease=False, close_milestone=False, + dry_run=False): + """ + Creates a release for requested version. 
+
+        :raises RuntimeError:
+            Raised if a release for the given version already
+            exists and ``recreate`` is False
+        """
+        if not dry_run and close_milestone:
+            self.milestone.edit(self.version, state="closed")
+
+        for release in self.repo.get_releases():
+            if release.tag_name == self.version:
+                if recreate:
+                    logger.warning(
+                        "Deleting existing release for %s", release.tag_name)
+                    release.delete_release()
+                    # TODO: make sure we delete the tag too
+                else:
+                    raise RuntimeError(
+                        "A release for %r already exists" % self.version)
+
+        logger.info("Creating **draft** release %r", self.version)
+        message = self.release_message()
+        if not dry_run:
+            return self.repo.create_git_tag_and_release(
+                self.version,
+                "Tagged by release.py",
+                self.version,
+                message,
+                self.commit(),
+                "commit",
+                draft=True, prerelease=prerelease
+            )
+
+        else:
+            return message
+
+
+AppVeyorArtifact = namedtuple(
+    "AppVeyorArtifact", ("path", "url", "unpacked", "build_success")
+)
+
+
+class AppVeyor(Session):
+    """
+    The core class used for interacting with and downloading content
+    from AppVeyor.
+
+    :keyword str branch:
+        The branch to download and retrieve information for. By default this
+        is set to the 'master' branch.
+    """
+    API = "https://ci.appveyor.com/api"
+    API_PROJECT = API + "/projects/opalmer/pywincffi"
+
+    def __init__(self, branch="master"):
+        self.session.mount(self.API, HTTPAdapter(max_retries=10))
+        self.branch_name = branch
+        self.branch = self.json(
+            self.API_PROJECT + "/branch/%s" % self.branch_name)
+        self.message = self.branch["build"]["message"]
+
+    def artifacts(self, directory=None, ignore_failures=False):
+        """
+        Downloads the build artifacts to the requested directory.
+
+        :keyword str directory:
+            The directory to download the artifacts to. By default a random
+            directory will be created for you if one is not provided.
+
+        :keyword bool ignore_failures:
+            If True, continue and yield artifacts even if a job was not
+            successful. This is False by default, meaning a failed job
+            raises :class:`RuntimeError`.
+
+        :raises RuntimeError:
+            Raised if there is a problem retrieving or validating one
+            of the build artifacts.
+
+        :rtype: iterator producing :class:`AppVeyorArtifact`
+        """
+
+        if directory is None:
+            directory = tempfile.mkdtemp()
+
+        logger.debug("Downloading build artifacts to %s", directory)
+
+        try:
+            os.makedirs(directory)
+        except (OSError, IOError, WindowsError) as error:  # pragma: no cover
+            if error.errno != EEXIST:
+                raise
+
+        for job in self.branch["build"]["jobs"]:
+            job_id = job["jobId"]
+            build_success = job["status"] == "success"
+
+            if not ignore_failures and not build_success:
+                raise RuntimeError(
+                    "Cannot publish a failed job. "
+                    "(%r != success)." % job["status"])
+
+            # Iterate over and download all the artifacts
+            artifact_url = \
+                self.API + "/buildjobs/{id}/artifacts".format(id=job_id)
+
+            build_artifacts = self.json(artifact_url)
+            if not build_artifacts:
+                logger.warning(
+                    "Build %s does not contain any artifacts", artifact_url)
+
+            for artifact in build_artifacts:
+                if artifact["fileName"] == ".coverage":
+                    continue
+
+                if artifact["type"] not in ("File", "Zip"):  # pragma: no cover
+                    logger.debug("Artifact %r is not a file.", artifact)
+                    continue
+
+                # Download the file.
+                file_url = artifact_url + "/" + artifact["fileName"]
+                logger.info("Download and unpack %s", file_url)
+                local_path = join(directory, basename(artifact["fileName"]))
+                self.download(file_url, path=local_path)
+
+                unpacked = True
+                if local_path.endswith(".whl"):
+                    # Unpack the wheel to be sure the structure is correct.
+ # This helps to ensure that the download not incomplete + # or corrupt. We don't really care about the resulting + # files. + unpacked = check_wheel(local_path) + + yield AppVeyorArtifact( + path=local_path, url=file_url, + unpacked=unpacked, build_success=build_success) + + +def docs_built(version): + """Returns True if the docs have been built for the given version""" + response = Session.session.get( + "https://pywincffi.readthedocs.org/en/%s/" % version) + return response.status_code == OK diff --git a/pywincffi/kernel32/__init__.py b/pywincffi/kernel32/__init__.py index f66de84..0360f96 100644 --- a/pywincffi/kernel32/__init__.py +++ b/pywincffi/kernel32/__init__.py @@ -1,6 +1,6 @@ """ -Kernel32 -======== +Kernel32 Sub-Package +==================== Provides functions, constants and utilities that wrap the Windows kernel32 library. diff --git a/setup.py b/setup.py index 94a59b6..9778ff8 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,15 @@ +from __future__ import print_function + import os import sys from errno import ENOENT +from os.path import dirname, abspath, join, isdir + from setuptools import setup, find_packages +from distutils.command.upload import upload + +from pywincffi import __version__ + try: WindowsError @@ -25,14 +33,73 @@ if sys.version_info[0:2] < (3, 4): requirements += ["enum34"] +ROOT = dirname(abspath(__file__)) +DISTS = join(ROOT, "dist") + + +class AppVeyorArtifactUpload(upload): + """ + A subclass of the normal upload command which + """ + def run(self): + if not isdir(DISTS): + print("%s does not exist" % DISTS, file=sys.stderr) + sys.exit(1) + + # Clean out everything in dist/* first. This ensures that + # if we have local files they'll be replaced by the artifacts + # that we're downloading. + for root, dirs, files in os.walk(DISTS): + for name in files: + os.remove(join(root, name)) + + from pywincffi.dev.release import AppVeyor + appveyor = AppVeyor() + + for artifact in appveyor.artifacts(directory=DISTS): + extension = artifact.path.split(".")[-1] + if extension not in ("whl", "zip", "msi", "exe"): + continue + + for root, dirs, files in os.walk(DISTS): + for filename in files: + if filename.endswith(".zip"): + command = "sdist" + pyversion = "none" + elif filename.endswith(".whl"): + command = "bdist_wheel" + _, _, pyversion, _, _ = filename.rstrip(".whl").split("-") + pyversion = ".".join(list(pyversion.lstrip("cp"))) + elif filename.endswith(".msi"): + command = "bdist_msi" + pyversion = \ + filename.rstrip(".msi").split("-")[-1].lstrip("py") + elif filename.endswith(".exe"): + command = "bdist_wininst" + raise NotImplementedError( + "Don't have `pyversion` implemented for %r" % filename) + else: + print( + "Unknown file type: %r" % filename.split(".")[-1], + file=sys.stderr) + sys.exit(1) + + filename = join(root, filename) + self.upload_file(command, pyversion, filename) + setup_keywords = dict( name="pywincffi", - version="0.1.0", + version=".".join(map(str, __version__)), + cmdclass={ + "upload_from_appveyor": AppVeyorArtifactUpload + }, packages=find_packages( include=("pywincffi*", ) ), include_package_data=True, author="Oliver Palmer", + author_email="oliverpalmer@opalmer.com", + url="http://github.com/opalmer/pywincffi", description="A Python library which wraps Windows functions using CFFI", long_description=long_description, setup_requires=requirements, diff --git a/tests/test_dev/test_release.py b/tests/test_dev/test_release.py new file mode 100644 index 0000000..a936bde --- /dev/null +++ b/tests/test_dev/test_release.py @@ -0,0 +1,447 
@@ +import os +import hashlib +import tempfile +import shutil +import string +import subprocess +import sys +from collections import namedtuple +from random import randint, choice +from textwrap import dedent +from os.path import dirname, abspath, isfile, join, isdir, basename + +try: + from http.client import OK, BAD_REQUEST +except ImportError: + # pylint: disable=import-error,wrong-import-order + from httplib import OK, BAD_REQUEST + +from mock import Mock, patch +from github import Github +from requests.adapters import HTTPAdapter + +from pywincffi.core.config import config +from pywincffi.dev import release # used to mock top level functions +from pywincffi.dev.release import ( + Session, AppVeyor, AppVeyorArtifact, GitHubAPI, check_wheel, docs_built) +from pywincffi.dev.testutil import TestCase + + +class TestWheel(TestCase): + """ + Tests for constants of :func:`pywincffi.dev.release.test_wheel` + """ + def test_fails(self): + fd, path = tempfile.mkstemp() + self.addCleanup(os.remove, path) + + with os.fdopen(fd, "w") as file_: + file_.write("") + + self.assertFalse(check_wheel(path)) + + def test_success(self): + root_dir = dirname(dirname(dirname(abspath(__file__)))) + + # Be sure this exists otherwise the command below may fail for + # unexpected reasons. + setup_py = join(root_dir, "setup.py") + self.assertTrue(isfile(setup_py), "%s does not exist" % setup_py) + + wheels = [] + + while not wheels: + for root, _, files in os.walk(join(root_dir, "dist")): + for filename in files: + if filename.endswith(".whl"): + wheels.append(join(root, filename)) + + if wheels: + break + + # Build pywincffi if we need to. We should always be able to + # unpack our own library. + process = subprocess.Popen( + [sys.executable, "setup.py", "bdist_wheel"], + stderr=subprocess.PIPE, stdout=subprocess.PIPE, cwd=root_dir) + process.communicate() + self.assertEqual(process.returncode, 0) + + for path in wheels: + self.assertTrue(check_wheel(path), path) + + +class TestSession(TestCase): + """ + Tests for constants of :class:`pywincffi.dev.release.Session` + """ + DOWNLOAD_SHA1 = "89ff14348b410051fff2eb206183993f659d85e0" + DOWNLOAD_URL = \ + "https://raw.githubusercontent.com/opalmer/pywincffi/" \ + "master/.ci/appveyor/run_with_compiler.cmd" + + def setUp(self): + super(TestSession, self).setUp() + self.session = Session.session + self.session.mount("https://", HTTPAdapter(max_retries=100)) + + def test_check_code_success(self): + response = self.session.get(AppVeyor.API) + Session.check_code(response, OK) + + def test_check_code_failure(self): + response = self.session.get(AppVeyor.API) + + with self.assertRaises(RuntimeError): + Session.check_code(response, BAD_REQUEST) + + def test_json_success(self): + data = Session.json(AppVeyor.API_PROJECT) + self.assertEqual(data.get("project", {}).get("name", {}), "pywincffi") + + def test_json_failure(self): + with self.assertRaises(ValueError): + Session.json(AppVeyor.API) + + def test_download_random_path(self): + path = Session.download(self.DOWNLOAD_URL) + self.addCleanup(os.remove, path) + with open(path, "rb") as file_: + sha1 = hashlib.sha1(file_.read()) + + self.assertEqual(sha1.hexdigest(), self.DOWNLOAD_SHA1) + + def test_download_specific_path(self): + fd, path = tempfile.mkstemp() + os.close(fd) + self.addCleanup(os.remove, path) + + Session.download(self.DOWNLOAD_URL, path=path) + with open(path, "rb") as file_: + sha1 = hashlib.sha1(file_.read()) + + self.assertEqual(sha1.hexdigest(), self.DOWNLOAD_SHA1) + + +class TestAppVeyor(TestCase): + """ 
+ Tests for constants of :class:`pywincffi.dev.release.AppVeyor` + """ + def setUp(self): + super(TestAppVeyor, self).setUp() + self.job_id = self.random_string() + self.artifact_url = None + self.artifact_path = None + self.branch = { + "build": { + "message": self.random_string(), + "jobs": [ + { + "jobId": self.job_id, + "status": "success" + } + ] + } + } + + with patch.object(Session, "json", return_value=self.branch): + self.appveyor = AppVeyor() + + def random_string(self): + return "".join( + [choice(string.ascii_letters) for _ in range(randint(5, 20))]) + + def test_creates_directory(self): + path = join(tempfile.gettempdir(), self.random_string()) + + with patch.object(Session, "json", return_value=[]): + list(self.appveyor.artifacts(directory=path)) + + self.assertTrue(isdir(path)) + self.addCleanup(shutil.rmtree, path, ignore_errors=True) + + def test_fails_for_unsuccessful_build(self): + self.appveyor.branch["build"]["jobs"][0]["status"] = "foo" + + with self.assertRaises(RuntimeError): + with patch.object(Session, "json", return_value=[]): + list(self.appveyor.artifacts()) + + def test_downloads_artifacts(self): + artifacts = [ + {"type": "File", "fileName": basename(TestSession.DOWNLOAD_URL)} + ] + + _download = Session.download + self.artifact_path = None + self.artifact_url = None + + def download(_, url, path=None): + expected_url = \ + AppVeyor.API + \ + "/buildjobs/{id}/artifacts".format(id=self.job_id) + \ + "/" + artifacts[0]["fileName"] + self.assertEqual(url, expected_url) + self.artifact_path = path + self.artifact_url = expected_url + + _download(TestSession.DOWNLOAD_URL, path=path) + + with patch.object(Session, "json", return_value=artifacts): + with patch.object(Session, "download", download): + results = list(self.appveyor.artifacts()) + + self.assertEqual( + results, [ + AppVeyorArtifact( + path=self.artifact_path, + url=self.artifact_url, + unpacked=True, build_success=True + ) + ] + ) + + def test_ignore_coverage(self): + artifacts = [ + {"type": "File", "fileName": ".coverage"} + ] + + _download = Session.download + self.artifact_path = None + self.artifact_url = None + + def download(_, url, path=None): + _download(TestSession.DOWNLOAD_URL, path=path) + + with patch.object(Session, "json", return_value=artifacts): + with patch.object(Session, "download", download): + for _ in self.appveyor.artifacts(): + self.fail("There should be nothing to iterate over") + + def test_checks_wheel(self): + artifacts = [ + {"type": "File", "fileName": "foobar.whl"} + ] + + _download = Session.download + self.artifact_path = None + self.artifact_url = None + directory = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, directory, ignore_errors=True) + + def download(_, url, path=None): + _download(TestSession.DOWNLOAD_URL, path=path) + + with patch.object(release, "check_wheel") as mocked: + with patch.object(Session, "json", return_value=artifacts): + with patch.object(Session, "download", download): + list(self.appveyor.artifacts(directory=directory)) + + mocked.assert_called_with(join(directory, "foobar.whl")) + + +class GitHubAPICase(TestCase): + """ + The base class for all test cases of :class:`GitHubAPI`. This is + required so that the tests don't require an authentication + token and so we can avoid hitting GitHub's API. 
+ """ + def setUp(self): + super(GitHubAPICase, self).setUp() + self.version = "0.0.0" + + # The test token + self.token = "fake_token" + github_token = config.get("pywincffi", "github_token") + config.set("pywincffi", "github_token", self.token) + self.addCleanup(config.set, "pywincffi", "github_token", github_token) + + # Mocks for the Github class so we don't make any API calls + self.mocked_get_repo = patch.object( + Github, "get_repo", + return_value=Mock( + get_milestones=lambda: [Mock(title=self.version)])) + self.mocked_get_repo.start() + self.addCleanup(self.mocked_get_repo.stop) + + +class TestGitHubAPIInit(GitHubAPICase): + """ + Tests for :meth:`pywincffi.dev.release.GitHubAPI.__init__` + """ + def test_version(self): + api = GitHubAPI(self.version) + self.assertEqual(api.version, self.version) + + def test_branch_default(self): + api = GitHubAPI(self.version) + self.assertEqual(api.branch, "master") + + def test_branch_non_default(self): + api = GitHubAPI(self.version, branch="foobar") + self.assertEqual(api.branch, "foobar") + + def test_token_not_set(self): + config.set("pywincffi", "github_token", "") + with self.assertRaises(RuntimeError): + GitHubAPI(self.version) + + def test_milestone_not_found(self): + self._cleanups.remove((self.mocked_get_repo.stop, (), {})) + self.mocked_get_repo.stop() + mock = patch.object( + Github, "get_repo", + return_value=Mock( + get_milestones=lambda: [Mock(title="x.x.x")])) + mock.start() + self.addCleanup(mock.stop) + with self.assertRaises(ValueError): + GitHubAPI(self.version) + + +class TestGitHubAPICommit(GitHubAPICase): + """ + Tests for :meth:`pywincffi.dev.release.GitHubAPI.commit` + """ + def test_commit(self): + api = GitHubAPI(self.version) + expected = "da39a3ee5e6b4b0d3255bfef95601890afd80709" + with patch.object( + api.repo, "get_branch", return_value=Mock( + commit=Mock(sha=expected))): + self.assertEqual(api.commit(), expected) + + +class TestGitHubAPIReleaseMessage(GitHubAPICase): + """ + Tests for :meth:`pywincffi.dev.release.GitHubAPI.release_message` + """ + def test_gets_all_issues(self): + api = GitHubAPI(self.version) + + with patch.object(api.repo, "get_issues", return_value=[]) as mocked: + api.release_message() + + mocked.assert_called_with(milestone=api.milestone, state="all") + + def test_message(self): + label = namedtuple("Label", ("name", )) + + issues = [ + Mock(number=1, url="/1", title="Issue 1", state="closed", + labels=[label(name="unittest")]), + Mock(number=3, url="/3", title="Issue 3", state="closed", + labels=[label(name="enhancement")]), + Mock(number=2, url="/2", title="Issue 2", state="closed", + labels=[label(name="enhancement")]), + Mock(number=4, url="/4", title="Issue 4", state="closed", + labels=[label(name="bug")]), + Mock(number=5, url="/5", title="Issue 5", state="closed", + labels=[label(name="enhancement"), label(name="bug")]), + Mock(number=6, url="/6", title="Issue 6", state="closed", + labels=[]), + Mock(number=7, url="/7", title="Issue 7", state="closed", + labels=[label(name="documentation")]) + ] + + api = GitHubAPI(self.version) + + with patch.object(api.repo, "get_issues", return_value=issues): + self.assertEqual(api.release_message().strip(), dedent(""" + ## External Links + Links for documentation, release files and other useful information. + * [Documentation](%s) + * [PyPi Package](%s) + * [GitHub Issues](%s) + + ## Pull Requests and Issues + Pull requests and issues associated with this release. 
+ + #### Enhancements + [5](/5) - Issue 5 + [2](/2) - Issue 2 + [3](/3) - Issue 3 + #### Bugs + [4](/4) - Issue 4 + #### Documentation + [7](/7) - Issue 7 + #### Unittests + [1](/1) - Issue 1 + #### Other + [6](/6) - Issue 6 + """).strip() % ( + api.read_the_docs, api.pypi_release, api.milestone_filter)) + + +class TestGitHubAPICreateRelease(GitHubAPICase): + """ + Tests for :meth:`pywincffi.dev.release.GitHubAPI.create_release` + """ + def setUp(self): + super(TestGitHubAPICreateRelease, self).setUp() + self.api = GitHubAPI(self.version) + mock = patch.object(self.api, "release_message", return_value="foobar") + mock.start() + self.addCleanup(mock.stop) + + def set_releases(self, value): + mock = patch.object(self.api.repo, "get_releases", return_value=value) + mock.start() + self.addCleanup(mock.stop) + return mock + + def test_dry_run(self): + self.set_releases([]) + + # Exceptions will be raised if dry_run actually tries to do + # something + self.assertEqual(self.api.create_release(dry_run=True), "foobar") + + def test_closes_milestone(self): + self.set_releases([]) + + with patch.object(self.api.milestone, "edit") as mocked: + self.api.create_release(close_milestone=True) + + mocked.assert_called_with(self.version, state="closed") + + def test_create_tag_and_release_fails_without_recreate(self): + self.set_releases([Mock(tag_name=self.version)]) + + with self.assertRaisesRegex(RuntimeError, + ".*%r already exists.*" % self.version): + self.api.create_release() + + def test_create_tag_and_release_deletes_existing(self): + release_tag = Mock(tag_name=self.version) + self.set_releases([release_tag]) + self.api.create_release(recreate=True) + self.assertEqual(release_tag.delete_release.call_count, 1) + + def test_create_tag_and_release_arguments(self): + self.set_releases([]) + + with patch.object(self.api.repo, + "create_git_tag_and_release") as mocked: + self.api.create_release(recreate=True) + + mocked.assert_called_with( + self.api.version, + "Tagged by release.py", + self.version, + self.api.release_message(), + self.api.commit(), + "commit", + draft=True, prerelease=False + ) + + +class TestDocsBuilt(TestCase): + """ + Tests for :func:`pywincffi.dev.release.GitHubAPI.docs_built` + """ + def test_success(self): + self.assertTrue(docs_built("latest")) + + def test_failure(self): + self.assertFalse(docs_built("does_not_exist")) diff --git a/tools/release.py b/tools/release.py new file mode 100755 index 0000000..804004a --- /dev/null +++ b/tools/release.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python + +from __future__ import with_statement + +import argparse +import subprocess +import sys +from os.path import dirname, abspath + +try: + WindowsError +except NameError: # pragma: no cover + WindowsError = OSError + +import requests + +ROOT = dirname(dirname(abspath(__file__))) + +# Add the root of the repo to sys.path so +# we can import pywcinffi directly. +sys.path.insert(0, ROOT) + +from pywincffi import __version__ +from pywincffi.core.logger import get_logger +from pywincffi.dev.release import GitHubAPI, AppVeyor, docs_built + +APPVEYOR_API = "https://ci.appveyor.com/api" +APPVEYOR_API_PROJ = APPVEYOR_API + "/projects/opalmer/pywincffi" + +session = requests.Session() +session.headers.update({ + "Accept": "application/json", + "Content-Type": "application/json" +}) + +logger = get_logger("dev.release") + + +def should_continue(question, skip=False): + """ + Asks a question, returns True if the answer is yes. 
Calls + ``sys.exit(1) if not.""" + if skip: + print("%s < 'y'" % question) + return True + + try: + answer = raw_input(question) + except NameError: + answer = input(question) + + if answer != "y": + print("Stopping.") + sys.exit(1) + + +def parse_arguments(): + """Constructs an argument parser and returns parsed arguments""" + parser = argparse.ArgumentParser(description="Cuts a release of pywincffi") + parser.add_argument( + "--confirm", action="store_true", default=False, + help="If provided, do not ask any questions and answer 'yes' to all " + "queries." + ) + parser.add_argument( + "-n", "--dry-run", action="store_true", default=False, + help="If provided, don't do anything destructive." + ) + parser.add_argument( + "--skip-pypi", action="store_true", default=False, + help="If provided, do not upload the release to pypi." + ) + parser.add_argument( + "--skip-github", action="store_true", default=False, + help="If provided, do not create a release on GitHub." + ) + parser.add_argument( + "--keep-milestone-open", action="store_true", default=False, + help="If provided, do not close the milestone" + ) + parser.add_argument( + "--download-artifacts", + help="If provided, download artifacts to this directory. The setup.py " + "will redownload the files, this is mostly for testing." + ) + parser.add_argument( + "--recreate", action="store_true", default=False, + help="If provided, recreate the release" + ) + return parser.parse_args() + + +def main(): + args = parse_arguments() + version = ".".join(map(str, __version__)) + + # Make sure we really want to create a release of this version. + should_continue( + "Create release of version %s? [y/n] " % version, + skip=args.confirm + ) + + if not args.skip_github: + github = GitHubAPI(version) + + if github.milestone.state != "closed": + should_continue( + "GitHub milestone %s is still open, continue? [y/n]" % version, + skip=args.confirm) + + release = github.create_release( + recreate=args.recreate, dry_run=args.dry_run, + close_milestone=not args.keep_milestone_open) + + if args.dry_run: + print(release) + + # TODO: Hack around in PyGitHub's request context so we can + # upload release artifacts + logger.warning("You must manually upload release artifacts") + + logger.info("Created GitHub release") + + if args.download_artifacts: + appveyor = AppVeyor() + for _ in appveyor.artifacts(directory=args.download_artifacts): + continue + + logger.info("Downloaded build artifacts to %s", args.download_artifacts) + + if not args.skip_pypi: + subprocess.check_call([ + sys.executable, "setup.py", "register"], + cwd=ROOT + ) + subprocess.check_call([ + sys.executable, "setup.py", "upload_from_appveyor"], + cwd=ROOT + ) + logger.info("Created PyPi release") + + if not docs_built(version): + logger.error("Documentation not built for %s", version) + +if __name__ == "__main__": + main() diff --git a/tools_requirements.txt b/tools_requirements.txt new file mode 100644 index 0000000..54763ad --- /dev/null +++ b/tools_requirements.txt @@ -0,0 +1,3 @@ +requests +gitpython +wheel