From 06af7ed2e8d8e19b2b3abcdb588c106b9b63252a Mon Sep 17 00:00:00 2001
From: Sebastian Weigand
Date: Mon, 17 May 2021 01:09:00 +0200
Subject: [PATCH] Use cibuildwheel to build more wheels (#76)

* Changed workflow to use joerick/cibuildwheel for building and testing
* Added non-native architectures on Linux
* Split up architectures for quicker runs
* Split up build targets to more workers for faster overall build time
* Moved linting step to python-publish.yml
* Added local build artifacts to .gitignore
* Only emulate architectures on Linux if the arch isn't 'auto'
* Removed python-test.yml and renamed python-publish.yml to tests.yml

  To get the coverage upload on Linux to work properly, some 'hacks' were
  needed, since the Linux build and tests run inside Docker. Those hacks are:
  - temporarily renaming kernprof.py so tests only use the installed one
  - replacing the file paths inside the .coverage files, so they will be
    valid in the host environment (needed for coverage combine)
  - copying the .coverage file to the wheel output directory, so it is
    available on the host
* Moved python-sdist-test.yml code to tests.yml

  Since the sdist test is mainly there to ensure that all needed files are
  included in the sdist, IMHO it is enough to test it with one Python
  version; compilability is already covered by the wheel tests.
* Made deploy only run on tags
* Removed branch restrictions
* Added back 'test_version_agreement'
* Updated cibuildwheel to 1.11.0
* Added dependabot config to check GitHub Actions for updates
---
 .github/dependabot.yml                  |   8 +
 .github/workflows/python-publish.yml    | 127 ------------
 .github/workflows/python-sdist-test.yml |  47 -----
 .github/workflows/python-test.yml       |  77 -------
 .github/workflows/tests.yml             | 265 ++++++++++++++++++++++++
 .gitignore                              |   2 +
 README.rst                              |   8 +-
 run_tests.py                            |  81 ++++++++
 tests/test_cli.py                       |   7 +-
 9 files changed, 363 insertions(+), 259 deletions(-)
 create mode 100644 .github/dependabot.yml
 delete mode 100644 .github/workflows/python-publish.yml
 delete mode 100644 .github/workflows/python-sdist-test.yml
 delete mode 100644 .github/workflows/python-test.yml
 create mode 100644 .github/workflows/tests.yml

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..6a61862a
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,8 @@
+version: 2
+updates:
+  # Maintain dependencies for GitHub Actions
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+      day: "friday"
diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml
deleted file mode 100644
index 11afa319..00000000
--- a/.github/workflows/python-publish.yml
+++ /dev/null
@@ -1,127 +0,0 @@
-# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-
-name: python_publish
-
-on:
-  push:
-    # This will only execute on the release branch
-    branches: [ release ]
-
-jobs:
-  build:
-
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: [3.6, 3.7, 3.8, 3.9]
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install dependencies
-      run: |
-        rm -rf .git # remove git repo which disable local versioning for publishing
-        python -m pip install --upgrade pip
-        python -m pip install flake8 pytest
-        #if [ -f 
requirements.txt ]; then pip install -r requirements.txt; fi - #- name: Lint with flake8 - # run: | - # # stop the build if there are Python syntax errors or undefined names - # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Build Wheel - run: | - ./run_manylinux_build.sh - echo "=====" - - name: Install Wheel - run: | - ls -al - ls -al wheelhouse - MB_PYTHON_TAG=$(python -c "import setup; print(setup.MB_PYTHON_TAG)") - VERSION=$(python -c "import setup; print(setup.VERSION)") - echo "MB_PYTHON_TAG = $MB_PYTHON_TAG" - echo "VERSION = $VERSION" - BDIST_WHEEL_PATH=$(ls wheelhouse/*-${VERSION}-${MB_PYTHON_TAG}-*2010_x86_64.whl) - echo "BDIST_WHEEL_PATH = $BDIST_WHEEL_PATH" - python -m pip install $BDIST_WHEEL_PATH[all] - - name: Test Wheel - run: | - python run_tests.py - - name: Sign and Publish - env: - # Toggle comments to publish to the test pypi instead of the real one - #TWINE_REPOSITORY_URL: https://upload.pypi.org/legacy/ - #PYUTILS_TWINE_USERNAME: ${{ secrets.PYUTILS_TWINE_USERNAME }} - #PYUTILS_TWINE_PASSWORD: ${{ secrets.PYUTILS_TWINE_PASSWORD }} - TEST_TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/ - PYUTILS_TEST_TWINE_USERNAME: ${{ secrets.PYUTILS_TEST_TWINE_USERNAME }} - PYUTILS_TEST_TWINE_PASSWORD: ${{ secrets.PYUTILS_TEST_TWINE_PASSWORD }} - run: | - ls -al - GPG_EXECUTABLE=gpg - $GPG_EXECUTABLE --version - openssl version - $GPG_EXECUTABLE --list-keys - export PYUTILS_CI_GITHUB_SECRET=${{ secrets.PYUTILS_CI_GITHUB_SECRET }} - GLKWS=$PYUTILS_CI_GITHUB_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/cci_public_gpg_key.pgp.enc | $GPG_EXECUTABLE --import - GLKWS=$PYUTILS_CI_GITHUB_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/cci_gpg_owner_trust.enc | $GPG_EXECUTABLE --import-ownertrust - GLKWS=$PYUTILS_CI_GITHUB_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/cci_secret_gpg_key.pgp.enc | $GPG_EXECUTABLE --import - $GPG_EXECUTABLE --list-keys || echo "first one fails for some reason" - $GPG_EXECUTABLE --list-keys - MB_PYTHON_TAG=$(python -c "import setup; print(setup.MB_PYTHON_TAG)") - VERSION=$(python -c "import setup; print(setup.VERSION)") - pip install twine - pip install six pyopenssl ndg-httpsclient pyasn1 -U --user - pip install requests[security] twine --user - GPG_KEYID=$(cat dev/public_gpg_key) - echo "GPG_KEYID = '$GPG_KEYID'" - # - #export TWINE_REPOSITORY_URL=https://test.pypi.org/legacy/ - #export PYUTILS_TEST_TWINE_USERNAME=${{ secrets.PYUTILS_TEST_TWINE_USERNAME }} - #export PYUTILS_TEST_TWINE_PASSWORD=${{ secrets.PYUTILS_TEST_TWINE_PASSWORD }} - #MB_PYTHON_TAG=$MB_PYTHON_TAG \ - # DO_GPG=True GPG_KEYID=$GPG_KEYID \ - # TWINE_REPOSITORY_URL=${TWINE_REPOSITORY_URL} \ - # TWINE_USERNAME=$PYUTILS_TEST_TWINE_USERNAME \ - # TWINE_PASSWORD=$PYUTILS_TEST_TWINE_PASSWORD \ - # GPG_EXECUTABLE=$GPG_EXECUTABLE \ - # DO_UPLOAD=True \ - # DO_TAG=False ./publish.sh - # - export TWINE_REPOSITORY_URL=https://upload.pypi.org/legacy/ - export PYUTILS_TWINE_USERNAME=${{ secrets.PYUTILS_TWINE_USERNAME }} - export PYUTILS_TWINE_PASSWORD=${{ secrets.PYUTILS_TWINE_PASSWORD }} - MB_PYTHON_TAG=$MB_PYTHON_TAG \ - DO_GPG=True GPG_KEYID=$GPG_KEYID \ - TWINE_PASSWORD=$PYUTILS_TWINE_PASSWORD \ - TWINE_USERNAME=$PYUTILS_TWINE_USERNAME \ - GPG_EXECUTABLE=$GPG_EXECUTABLE \ - DO_UPLOAD=True \ - 
DO_TAG=False ./publish.sh - -### -# Unfortunately we cant (yet) use the yaml docstring trick here -# https://github.community/t/allow-unused-keys-in-workflow-yaml-files/172120 -#__doc__: | -# # How to run locally -# # https://packaging.python.org/guides/using-testpypi/ -# cd $HOME/code -# git clone https://github.com/nektos/act.git $HOME/code/act -# cd $HOME/code/act -# chmod +x install.sh -# ./install.sh -b $HOME/.local/opt/act -# cd $HOME/code/line_profiler - -# load_secrets -# unset GITHUB_TOKEN -# $HOME/.local/opt/act/act \ -# --secret=PYUTILS_TWINE_PASSWORD=$PYUTILS_TWINE_PASSWORD \ -# --secret=PYUTILS_TWINE_USERNAME=$PYUTILS_TWINE_USERNAME \ -# --secret=PYUTILS_CI_GITHUB_SECRET=$PYUTILS_CI_GITHUB_SECRET \ -# --secret=PYUTILS_TEST_TWINE_USERNAME=$PYUTILS_TEST_TWINE_USERNAME \ -# --secret=PYUTILS_TEST_TWINE_PASSWORD=$PYUTILS_TEST_TWINE_PASSWORD diff --git a/.github/workflows/python-sdist-test.yml b/.github/workflows/python-sdist-test.yml deleted file mode 100644 index ae04a38b..00000000 --- a/.github/workflows/python-sdist-test.yml +++ /dev/null @@ -1,47 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: python_sdist_test - -on: - push: - branches: [ master, dev/github_actions ] - pull_request: - branches: [ master ] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.5, 3.6, 3.7, 3.8, 3.9] - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Upgrade pip - run: | - python -m pip install --upgrade pip - python -m pip install -r requirements/build.txt - python -m pip install -r requirements/tests.txt - - name: Build sdist - run: | - python setup.py sdist - - name: Install sdist - run: | - cd dist - ls -al - pip install line_profiler*.tar.gz -v - - name: Test sdist - run: | - pwd - ls -al - # Ensure the source doesn't conflict with the test - rm -rf line_profiler - rm -rf kernprof.py - # cd .. - python run_tests.py diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml deleted file mode 100644 index b104c08a..00000000 --- a/.github/workflows/python-test.yml +++ /dev/null @@ -1,77 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: python_test - -on: - push: - branches: [ master, dev/github_actions ] - pull_request: - branches: [ master ] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.6, 3.7, 3.8, 3.9] - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install flake8 pytest - #if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - # flake8 . 
--count --exit-zero --max-complexity=20 --max-line-length=127 --statistics - - name: Build Wheel - run: | - ./run_manylinux_build.sh - echo "=====" - - name: Install Wheel - run: | - ls -al - ls -al wheelhouse - MB_PYTHON_TAG=$(python -c "import setup; print(setup.MB_PYTHON_TAG)") - VERSION=$(python -c "import setup; print(setup.VERSION)") - echo "MB_PYTHON_TAG = $MB_PYTHON_TAG" - echo "VERSION = $VERSION" - BDIST_WHEEL_PATH=$(ls wheelhouse/*-${VERSION}-${MB_PYTHON_TAG}-*2010_x86_64.whl) - echo "BDIST_WHEEL_PATH = $BDIST_WHEEL_PATH" - python -m pip install $BDIST_WHEEL_PATH[all] - - name: Test Wheel - run: | - python run_tests.py - - name: Codecov Upload - uses: codecov/codecov-action@v1 - with: - file: ./tests/coverage.xml - -#__doc__: | -# # How to run locally -# # https://packaging.python.org/guides/using-testpypi/ -# cd $HOME/code -# git clone https://github.com/nektos/act.git $HOME/code/act -# cd $HOME/code/act -# chmod +x install.sh -# ./install.sh -b $HOME/.local/opt/act -# cd $HOME/code/line_profiler - -# load_secrets -# unset GITHUB_TOKEN -# $HOME/.local/opt/act/act \ -# --secret=PYUTILS_TWINE_PASSWORD=$PYUTILS_TWINE_PASSWORD \ -# --secret=PYUTILS_TWINE_USERNAME=$PYUTILS_TWINE_USERNAME \ -# --secret=PYUTILS_CI_GITHUB_SECRET=$PYUTILS_CI_GITHUB_SECRET \ -# --secret=PYUTILS_TEST_TWINE_USERNAME=$PYUTILS_TEST_TWINE_USERNAME \ -# --secret=PYUTILS_TEST_TWINE_PASSWORD=$PYUTILS_TEST_TWINE_PASSWORD diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000..fadaf7bf --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,265 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Tests + +on: + push: + pull_request: + branches: [ master ] + +jobs: + lint: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install flake8 + - name: Lint with flake8 + run: | + # stop the build if there are Python syntax errors or undefined names + flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + # flake8 . --count --exit-zero --max-complexity=20 --max-line-length=127 --statistics + + build_and_test_sdist: + name: Test sdist Python 3.8 + runs-on: ubuntu-latest + needs: [lint] + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Upgrade pip + run: | + python -m pip install --upgrade pip + python -m pip install -r requirements/build.txt + python -m pip install -r requirements/tests.txt + - name: Build sdist + run: | + python setup.py sdist + - name: Install sdist + run: | + cd dist + ls -al + pip install line_profiler*.tar.gz -v + - name: Test sdist + run: | + pwd + ls -al + # Ensure the source doesn't conflict with the test + rm -rf line_profiler + rm -rf kernprof.py + # cd .. 
+ python run_tests.py + + - name: Upload sdist artifact + uses: actions/upload-artifact@v2 + with: + name: wheels + path: ./dist/*.tar.gz + + build_and_test_wheels: + name: ${{ matrix.cibw_build }} on ${{ matrix.os }}, arch=${{ matrix.arch }} + runs-on: ${{ matrix.os }} + needs: [lint] + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + arch: [auto] + cibw_build: [cp3*-*] + cibw_skip: ["*-win32"] + # Add additional workers to reduce overall build time + include: + - os: windows-latest + cibw_build: cp3*-win32 + arch: auto + cibw_skip: "" + - os: ubuntu-latest + arch: aarch64 + cibw_build: cp35-* + - os: ubuntu-latest + arch: aarch64 + cibw_build: cp36-* + - os: ubuntu-latest + arch: aarch64 + cibw_build: cp37-* + - os: ubuntu-latest + arch: aarch64 + cibw_build: cp38-* + - os: ubuntu-latest + arch: aarch64 + cibw_build: cp39-* + + + steps: + - name: Checkout source + uses: actions/checkout@v2 + + # Configure compilers for Windows 64bit. + - name: Enable MSVC 64bit + if: matrix.os == 'windows-latest' && matrix.cibw_build != 'cp3*-win32' + uses: ilammy/msvc-dev-cmd@v1 + + # Configure compilers for Windows 32bit. + - name: Enable MSVC 32bit + if: matrix.os == 'windows-latest' && matrix.cibw_build == 'cp3*-win32' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: x86 + + # Emulate aarch64 ppc64le s390x under linux + - name: Set up QEMU + if: runner.os == 'Linux' && matrix.arch != 'auto' + uses: docker/setup-qemu-action@v1 + with: + platforms: all + + - name: Build wheels + uses: joerick/cibuildwheel@v1.11.0 + with: + output-dir: wheelhouse + # to supply options, put them in 'env', like: + env: + CIBW_SKIP: ${{ matrix.cibw_skip }} + CIBW_BUILD: ${{ matrix.cibw_build }} + CIBW_TEST_REQUIRES: -r requirements/tests.txt + CIBW_TEST_COMMAND: python {project}/run_tests.py + # configure cibuildwheel to build native archs ('auto'), or emulated ones + CIBW_ARCHS_LINUX: ${{ matrix.arch }} + + - name: Show built files + shell: bash + run: ls -la wheelhouse + + - name: Set up Python 3.8 to combine coverage Linux + if: runner.os == 'Linux' + uses: actions/setup-python@v2 + with: + python-version: 3.8 + + - name: Combine coverage Linux + if: runner.os == 'Linux' + run: | + echo '############ PWD' + pwd + python -m pip install coverage[toml] + echo '############ combine' + coverage combine ./wheelhouse + echo '############ XML' + coverage xml -o ./tests/coverage.xml + echo '############ FIND' + find . -name .coverage.* + find . 
-name coverage.xml + + - name: Codecov Upload + uses: codecov/codecov-action@v1 + with: + file: ./tests/coverage.xml + + - name: Upload wheels artifact + uses: actions/upload-artifact@v2 + with: + name: wheels + path: ./wheelhouse/*.whl + + deploy: + name: Uploading to PyPi + runs-on: ubuntu-latest + if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') + needs: [build_and_test_wheels, build_and_test_sdist] + steps: + - name: Checkout source + uses: actions/checkout@v2 + + - name: Download wheels and sdist + uses: actions/download-artifact@v2 + with: + name: wheels + path: dist + + - name: Show files to upload + shell: bash + run: ls -la dist + - name: Sign and Publish + env: + # Toggle comments to publish to the test pypi instead of the real one + #TWINE_REPOSITORY_URL: https://upload.pypi.org/legacy/ + #PYUTILS_TWINE_USERNAME: ${{ secrets.PYUTILS_TWINE_USERNAME }} + #PYUTILS_TWINE_PASSWORD: ${{ secrets.PYUTILS_TWINE_PASSWORD }} + TEST_TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/ + PYUTILS_TEST_TWINE_USERNAME: ${{ secrets.PYUTILS_TEST_TWINE_USERNAME }} + PYUTILS_TEST_TWINE_PASSWORD: ${{ secrets.PYUTILS_TEST_TWINE_PASSWORD }} + run: | + ls -al + GPG_EXECUTABLE=gpg + $GPG_EXECUTABLE --version + openssl version + $GPG_EXECUTABLE --list-keys + export PYUTILS_CI_GITHUB_SECRET=${{ secrets.PYUTILS_CI_GITHUB_SECRET }} + GLKWS=$PYUTILS_CI_GITHUB_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/cci_public_gpg_key.pgp.enc | $GPG_EXECUTABLE --import + GLKWS=$PYUTILS_CI_GITHUB_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/cci_gpg_owner_trust.enc | $GPG_EXECUTABLE --import-ownertrust + GLKWS=$PYUTILS_CI_GITHUB_SECRET openssl enc -aes-256-cbc -pbkdf2 -md SHA512 -pass env:GLKWS -d -a -in dev/cci_secret_gpg_key.pgp.enc | $GPG_EXECUTABLE --import + $GPG_EXECUTABLE --list-keys || echo "first one fails for some reason" + $GPG_EXECUTABLE --list-keys + MB_PYTHON_TAG=$(python -c "import setup; print(setup.MB_PYTHON_TAG)") + VERSION=$(python -c "import setup; print(setup.VERSION)") + pip install twine + pip install six pyopenssl ndg-httpsclient pyasn1 -U --user + pip install requests[security] twine --user + GPG_KEYID=$(cat dev/public_gpg_key) + echo "GPG_KEYID = '$GPG_KEYID'" + # + #export TWINE_REPOSITORY_URL=https://test.pypi.org/legacy/ + #export PYUTILS_TEST_TWINE_USERNAME=${{ secrets.PYUTILS_TEST_TWINE_USERNAME }} + #export PYUTILS_TEST_TWINE_PASSWORD=${{ secrets.PYUTILS_TEST_TWINE_PASSWORD }} + #MB_PYTHON_TAG=$MB_PYTHON_TAG \ + # DO_GPG=True GPG_KEYID=$GPG_KEYID \ + # TWINE_REPOSITORY_URL=${TWINE_REPOSITORY_URL} \ + # TWINE_USERNAME=$PYUTILS_TEST_TWINE_USERNAME \ + # TWINE_PASSWORD=$PYUTILS_TEST_TWINE_PASSWORD \ + # GPG_EXECUTABLE=$GPG_EXECUTABLE \ + # DO_UPLOAD=True \ + # DO_TAG=False ./publish.sh + # + export TWINE_REPOSITORY_URL=https://upload.pypi.org/legacy/ + export PYUTILS_TWINE_USERNAME=${{ secrets.PYUTILS_TWINE_USERNAME }} + export PYUTILS_TWINE_PASSWORD=${{ secrets.PYUTILS_TWINE_PASSWORD }} + MB_PYTHON_TAG=$MB_PYTHON_TAG \ + DO_GPG=True GPG_KEYID=$GPG_KEYID \ + TWINE_PASSWORD=$PYUTILS_TWINE_PASSWORD \ + TWINE_USERNAME=$PYUTILS_TWINE_USERNAME \ + GPG_EXECUTABLE=$GPG_EXECUTABLE \ + DO_UPLOAD=True \ + DO_TAG=False ./publish.sh + +### +# Unfortunately we cant (yet) use the yaml docstring trick here +# https://github.community/t/allow-unused-keys-in-workflow-yaml-files/172120 +#__doc__: | +# # How to run locally +# # https://packaging.python.org/guides/using-testpypi/ +# cd $HOME/code +# git clone 
https://github.com/nektos/act.git $HOME/code/act +# cd $HOME/code/act +# chmod +x install.sh +# ./install.sh -b $HOME/.local/opt/act +# cd $HOME/code/line_profiler + +# load_secrets +# unset GITHUB_TOKEN +# $HOME/.local/opt/act/act \ +# --secret=PYUTILS_TWINE_PASSWORD=$PYUTILS_TWINE_PASSWORD \ +# --secret=PYUTILS_TWINE_USERNAME=$PYUTILS_TWINE_USERNAME \ +# --secret=PYUTILS_CI_GITHUB_SECRET=$PYUTILS_CI_GITHUB_SECRET \ +# --secret=PYUTILS_TEST_TWINE_USERNAME=$PYUTILS_TEST_TWINE_USERNAME \ +# --secret=PYUTILS_TEST_TWINE_PASSWORD=$PYUTILS_TEST_TWINE_PASSWORD diff --git a/.gitignore b/.gitignore index 2d25b25c..a7f584d5 100644 --- a/.gitignore +++ b/.gitignore @@ -5,12 +5,14 @@ *.pyc *.pyo +*.pyd *.so *.o *.a build/ dist/ +_skbuild _line_profiler.c line_profiler.egg-info/ MANIFEST diff --git a/README.rst b/README.rst index b81fdf23..b3cbea70 100644 --- a/README.rst +++ b/README.rst @@ -1,7 +1,7 @@ line_profiler and kernprof -------------------------- -|Pypi| |Downloads| |CircleCI| |ActionsTest| |ActionsPublish| |Codecov| +|Pypi| |Downloads| |CircleCI| |ActionsTest| |Codecov| NOTICE: This is the official `line_profiler` repository. The most recent @@ -394,7 +394,5 @@ See `CHANGELOG`_. :target: https://pypi.python.org/pypi/line_profiler .. |Downloads| image:: https://img.shields.io/pypi/dm/line_profiler.svg :target: https://pypistats.org/packages/line_profiler -.. |ActionsTest| image:: https://github.com/pyutils/line_profiler/actions/workflows/python-test.yml/badge.svg - :target: https://github.com/pyutils/line_profiler/actions/workflows/python-test.yml -.. |ActionsPublish| image:: https://github.com/pyutils/line_profiler/actions/workflows/python-publish.yml/badge.svg - :target: https://github.com/pyutils/line_profiler/actions/workflows/python-publish.yml +.. |ActionsTest| image:: https://github.com/pyutils/line_profiler/actions/workflows/tests.yml/badge.svg + :target: https://github.com/pyutils/line_profiler/actions/workflows/tests.yml diff --git a/run_tests.py b/run_tests.py index e4de1ca6..84f5ef23 100755 --- a/run_tests.py +++ b/run_tests.py @@ -1,8 +1,77 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- from os.path import dirname, join, abspath +import sqlite3 import sys import os +import re + +def is_cibuildwheel(): + """Check if run with cibuildwheel.""" + return 'CIBUILDWHEEL' in os.environ + +def temp_rename_kernprof(repo_dir): + """ + Hacky workaround so kernprof.py doesn't get covered twice (installed and local). + This needed to combine the .coverage files, since file paths need to be unique. + + """ + original_path = repo_dir + '/kernprof.py' + tmp_path = original_path + '.tmp' + if os.path.isfile(original_path): + os.rename(original_path, tmp_path) + elif os.path.isfile(tmp_path): + os.rename(tmp_path, original_path) + +def replace_docker_path(path, runner_project_dir): + """Update path to a file installed in a temp venv to runner_project_dir.""" + pattern = re.compile(r"\/tmp\/.+?\/site-packages") + return pattern.sub(runner_project_dir, path) + +def update_coverag_file(coverage_path, runner_project_dir): + """ + Since the paths inside of docker vary from the runner paths, + the paths in the .coverage file need to be adjusted to combine them, + since 'coverage combine ' checks if the file paths exist. 
+ """ + try: + sqliteConnection = sqlite3.connect(coverage_path) + cursor = sqliteConnection.cursor() + print('Connected to Coverage SQLite') + + read_file_query = 'SELECT id, path from file' + cursor.execute(read_file_query) + + old_records = cursor.fetchall() + new_records = [(replace_docker_path(path, runner_project_dir), _id) for _id, path in old_records] + print('Updated coverage file paths:\n', new_records) + + sql_update_query = 'Update file set path = ? where id = ?' + cursor.executemany(sql_update_query, new_records) + sqliteConnection.commit() + print('Coverage Updated successfully') + cursor.close() + + except sqlite3.Error as error: + print('Failed to coverage: ', error) + finally: + if sqliteConnection: + sqliteConnection.close() + print('The sqlite connection is closed') + +def copy_coverage_cibuildwheel_docker(runner_project_dir): + """ + When run with cibuildwheel under linux, the tests run in the folder /project + inside docker and the coverage files need to be copied to the output folder. + """ + coverage_path = '/project/tests/.coverage' + if os.path.isfile(coverage_path): + update_coverag_file(coverage_path, runner_project_dir) + env_hash = hash((sys.version, os.environ.get('AUDITWHEEL_PLAT', ''))) + os.makedirs('/output', exist_ok=True) + os.rename(coverage_path, '/output/.coverage.{}'.format(env_hash)) + + if __name__ == '__main__': cwd = os.getcwd() @@ -10,6 +79,10 @@ test_dir = join(repo_dir, 'tests') print('cwd = {!r}'.format(cwd)) + if is_cibuildwheel(): + # rename kernprof.py to kernprof.py.tmp + temp_rename_kernprof(repo_dir) + import pytest # Prefer testing the installed version, but fallback to testing the @@ -48,8 +121,16 @@ '--cov=' + package_name, '--cov=' + 'kernprof', ] + if is_cibuildwheel(): + pytest_args.append('--cov-append') + pytest_args = pytest_args + sys.argv[1:] sys.exit(pytest.main(pytest_args)) finally: os.chdir(cwd) + if is_cibuildwheel(): + # restore kernprof.py from kernprof.py.tmp + temp_rename_kernprof(repo_dir) + # for CIBW under linux + copy_coverage_cibuildwheel_docker('/home/runner/work/line_profiler/line_profiler') print('Restoring cwd = {!r}'.format(cwd)) diff --git a/tests/test_cli.py b/tests/test_cli.py index 2eb57c79..f885911b 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,4 +1,5 @@ from os.path import join +from sys import executable def test_cli(): @@ -40,7 +41,7 @@ def my_inefficient_function(): tmp_lprof_fpath = join(tmp_dpath, 'foo.py.lprof') tmp_lprof_fpath - info = ub.cmd('python -m line_profiler {}'.format(tmp_lprof_fpath), + info = ub.cmd('{} -m line_profiler {}'.format(executable,tmp_lprof_fpath), cwd=tmp_dpath, verbose=3) assert info['ret'] == 0 # Check for some patterns that should be in the output @@ -53,8 +54,8 @@ def test_version_agreement(): Ensure that line_profiler and kernprof have the same version info """ import ubelt as ub - info1 = ub.cmd('python -m line_profiler --version') - info2 = ub.cmd('python -m kernprof --version') + info1 = ub.cmd('{} -m line_profiler --version'.format(executable)) + info2 = ub.cmd('{} -m kernprof --version'.format(executable)) # Strip local version suffixes version1 = info1['out'].strip().split('+')[0]
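
Not part of the patch, but handy for debugging the coverage 'hacks' described in
the commit message: the sketch below assumes coverage.py >= 5, which stores its
data in an SQLite database with a file(id, path) table (the same table that
run_tests.py rewrites), and prints every path recorded in a .coverage data file
together with whether it exists on the current machine. Paths that do not exist
on the host are exactly the ones that make 'coverage combine' misbehave.

#!/usr/bin/env python
# Illustrative debugging sketch (assumption: coverage.py >= 5 SQLite data files).
import os
import sqlite3
import sys


def show_coverage_paths(coverage_path):
    """Print each path recorded in the coverage data file and whether it exists."""
    con = sqlite3.connect(coverage_path)
    try:
        for file_id, path in con.execute('SELECT id, path FROM file'):
            # A path that does not exist here is one that the docker-to-host
            # rewrite in run_tests.py did not (or could not) fix.
            print('{:>4}  exists={!s:5}  {}'.format(file_id, os.path.exists(path), path))
    finally:
        con.close()


if __name__ == '__main__':
    show_coverage_paths(sys.argv[1] if len(sys.argv) > 1 else '.coverage')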
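
Similarly, the "Combine coverage Linux" step can be reproduced locally through
the coverage.py API instead of the CLI (coverage combine ./wheelhouse followed
by coverage xml -o ./tests/coverage.xml). This is only a sketch under the same
coverage.py >= 5 assumption; the paths mirror those used in tests.yml.

# Sketch: Python-API equivalent of the workflow's combine-and-report step
# (assumption: coverage.py >= 5; run from the repository root on the host).
import coverage

cov = coverage.Coverage()
# Merge the .coverage.<hash> files that run_tests.py copied into ./wheelhouse.
cov.combine(data_paths=['./wheelhouse'], strict=True)
cov.save()
cov.load()
# Write the XML report where the Codecov upload step expects it.
cov.xml_report(outfile='./tests/coverage.xml')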