diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 407b0828..d7a73a1e 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -21,9 +21,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - name: Checkout source
-     uses: actions/checkout@v4
+     uses: actions/checkout@v4.1.1
    - name: Set up Python 3.12 for linting
-     uses: actions/setup-python@v4.7.1
+     uses: actions/setup-python@v5.0.0
      with:
        python-version: '3.12'
    - name: Install dependencies
@@ -37,6 +37,7 @@ jobs:
    - name: Typecheck with mypy
      run: |-
        python -m pip install mypy
+       pip install -r requirements/runtime.txt
        mypy --install-types --non-interactive ./src/xdoctest
        mypy ./src/xdoctest
  build_and_test_sdist:
@@ -48,9 +49,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - name: Checkout source
-     uses: actions/checkout@v4
+     uses: actions/checkout@v4.1.1
    - name: Set up Python 3.12
-     uses: actions/setup-python@v4.7.1
+     uses: actions/setup-python@v5.0.0
      with:
        python-version: '3.12'
    - name: Upgrade pip
@@ -66,7 +67,7 @@ jobs:
        python -m build --sdist --outdir wheelhouse
    - name: Install sdist
      run: |-
-       ls -al ./wheelhouse
+       ls -al wheelhouse
        pip install --prefer-binary wheelhouse/xdoctest*.tar.gz -v
    - name: Test minimal loose sdist
      run: |-
@@ -80,7 +81,7 @@ jobs:
        # Get path to installed package
        MOD_DPATH=$(python -c "import xdoctest, os; print(os.path.dirname(xdoctest.__file__))")
        echo "MOD_DPATH = $MOD_DPATH"
-       python -m pytest --verbose --cov={self.mod_name} $MOD_DPATH ../tests
+       python -m pytest --verbose --cov=xdoctest $MOD_DPATH ../tests
        cd ..
    - name: Test full loose sdist
      run: |-
@@ -95,13 +96,13 @@ jobs:
        # Get path to installed package
        MOD_DPATH=$(python -c "import xdoctest, os; print(os.path.dirname(xdoctest.__file__))")
        echo "MOD_DPATH = $MOD_DPATH"
-       python -m pytest --verbose --cov={self.mod_name} $MOD_DPATH ../tests
+       python -m pytest --verbose --cov=xdoctest $MOD_DPATH ../tests
        cd ..
-   - name: Upload sdist artifact
-     uses: actions/upload-artifact@v3
+   - uses: actions/upload-artifact@v3.1.3
+     name: Upload sdist artifact
      with:
-       name: wheels
-       path: ./wheelhouse/*.tar.gz
+       name: sdist_wheels
+       path: wheelhouse/*.tar.gz
  build_purepy_wheels:
    ##
    # Download and test the pure-python wheels that were build in the
@@ -110,6 +111,7 @@ jobs:
    name: ${{ matrix.python-version }} on ${{ matrix.os }}, arch=${{ matrix.arch }} with ${{ matrix.install-extras }}
    runs-on: ${{ matrix.os }}
    strategy:
+     fail-fast: false
      matrix:
        os:
        - ubuntu-latest
@@ -119,14 +121,14 @@ jobs:
        - auto
    steps:
    - name: Checkout source
-     uses: actions/checkout@v4
+     uses: actions/checkout@v4.1.1
    - name: Set up QEMU
-     uses: docker/setup-qemu-action@v3
+     uses: docker/setup-qemu-action@v3.0.0
      if: runner.os == 'Linux' && matrix.arch != 'auto'
      with:
        platforms: all
    - name: Setup Python
-     uses: actions/setup-python@v4.7.1
+     uses: actions/setup-python@v5.0.0
      with:
        python-version: ${{ matrix.python-version }}
    - name: Build pure wheel
@@ -138,17 +140,19 @@ jobs:
    - name: Show built files
      shell: bash
      run: ls -la wheelhouse
-   - uses: actions/upload-artifact@v3
+   - uses: actions/upload-artifact@v3.1.3
      name: Upload wheels artifact
      with:
        name: wheels
        path: ./wheelhouse/xdoctest*.whl
  test_purepy_wheels:
    name: ${{ matrix.python-version }} on ${{ matrix.os }}, arch=${{ matrix.arch }} with ${{ matrix.install-extras }}
+   if: "! startsWith(github.event.ref, 'refs/heads/release')"
    runs-on: ${{ matrix.os }}
    needs:
    - build_purepy_wheels
    strategy:
+     fail-fast: false
      matrix:
        # Xcookie generates an explicit list of environments that will be used
        # for testing instead of using the more concise matrix notation.
        include:
        - python-version: '3.6'
          install-extras: tests-strict,runtime-strict
@@ -159,7 +163,7 @@ jobs:
          arch: auto
        - python-version: '3.6'
          install-extras: tests-strict,runtime-strict
-         os: macOS-latest
+         os: macos-13
          arch: auto
        - python-version: '3.6'
          install-extras: tests-strict,runtime-strict
@@ -179,7 +183,7 @@ jobs:
          arch: auto
        - python-version: '3.12'
          install-extras: tests
-         os: windows-latest
+         os: macOS-latest
          arch: auto
        - python-version: '3.12'
          install-extras: tests
@@ -187,67 +191,67 @@ jobs:
          arch: auto
        - python-version: '3.6'
          install-extras: tests,optional
-         os: windows-latest
+         os: ubuntu-20.04
          arch: auto
        - python-version: '3.7'
          install-extras: tests,optional
-         os: windows-latest
+         os: ubuntu-latest
          arch: auto
        - python-version: '3.8'
          install-extras: tests,optional
-         os: windows-latest
+         os: ubuntu-latest
          arch: auto
        - python-version: '3.9'
          install-extras: tests,optional
-         os: windows-latest
+         os: ubuntu-latest
          arch: auto
        - python-version: '3.10'
          install-extras: tests,optional
-         os: windows-latest
+         os: ubuntu-latest
          arch: auto
        - python-version: '3.11'
          install-extras: tests,optional
-         os: windows-latest
+         os: ubuntu-latest
          arch: auto
        - python-version: '3.12'
          install-extras: tests,optional
-         os: windows-latest
+         os: ubuntu-latest
          arch: auto
-       - python-version: pypy-3.7
+       - python-version: pypy-3.9
          install-extras: tests,optional
-         os: windows-latest
+         os: ubuntu-latest
          arch: auto
        - python-version: '3.6'
          install-extras: tests,optional
-         os: windows-latest
+         os: macos-13
          arch: auto
        - python-version: '3.7'
          install-extras: tests,optional
-         os: windows-latest
+         os: macos-13
          arch: auto
        - python-version: '3.8'
          install-extras: tests,optional
-         os: windows-latest
+         os: macOS-latest
          arch: auto
        - python-version: '3.9'
          install-extras: tests,optional
-         os: windows-latest
+         os: macOS-latest
          arch: auto
        - python-version: '3.10'
          install-extras: tests,optional
-         os: windows-latest
+         os: macOS-latest
          arch: auto
        - python-version: '3.11'
          install-extras: tests,optional
-         os: windows-latest
+         os: macOS-latest
          arch: auto
        - python-version: '3.12'
          install-extras: tests,optional
-         os: windows-latest
+         os: macOS-latest
          arch: auto
-       - python-version: pypy-3.7
+       - python-version: pypy-3.9
          install-extras: tests,optional
-         os: windows-latest
+         os: macOS-latest
          arch: auto
        - python-version: '3.6'
          install-extras: tests,optional
@@ -277,38 +281,26 @@ jobs:
          install-extras: tests,optional
          os: windows-latest
          arch: auto
-       - python-version: pypy-3.7
-         install-extras: tests,optional
-         os: windows-latest
-         arch: auto
-       - python-version: pypy-3.7
-         install-extras: tests,optional
-         os: windows-latest
-         arch: auto
-       - python-version: pypy-3.7
-         install-extras: tests,optional
-         os: windows-latest
-         arch: auto
-       - python-version: pypy-3.7
+       - python-version: pypy-3.9
          install-extras: tests,optional
          os: windows-latest
          arch: auto
    steps:
    - name: Checkout source
-     uses: actions/checkout@v4
+     uses: actions/checkout@v4.1.1
    - name: Enable MSVC 64bit
      uses: ilammy/msvc-dev-cmd@v1
      if: matrix.os == 'windows-latest'
    - name: Set up QEMU
-     uses: docker/setup-qemu-action@v3
+     uses: docker/setup-qemu-action@v3.0.0
      if: runner.os == 'Linux' && matrix.arch != 'auto'
      with:
        platforms: all
    - name: Setup Python
-     uses: actions/setup-python@v4.7.1
+     uses: actions/setup-python@v5.0.0
      with:
        python-version: ${{ matrix.python-version }}
-   - uses: actions/download-artifact@v3
+   - uses: actions/download-artifact@v2.1.1
      name: Download wheels
      with:
        name: wheels
        path: wheelhouse
@@ -325,6 +317,9 @@ jobs:
        pip install tomli pkginfo
        export WHEEL_FPATH=$(python -c "import pathlib; print(str(sorted(pathlib.Path('wheelhouse').glob('xdoctest*.whl'))[-1]).replace(chr(92), chr(47)))")
        export MOD_VERSION=$(python -c "from pkginfo import Wheel; print(Wheel('$WHEEL_FPATH').version)")
+       echo "WHEEL_FPATH=$WHEEL_FPATH"
+       echo "INSTALL_EXTRAS=$INSTALL_EXTRAS"
+       echo "MOD_VERSION=$MOD_VERSION"
        pip install --prefer-binary "xdoctest[$INSTALL_EXTRAS]==$MOD_VERSION" -f wheelhouse
        echo "Install finished."
    - name: Test wheel ${{ matrix.install-extras }}
@@ -342,6 +337,7 @@ jobs:
        ls -altr
        # Get the path to the installed package and run the tests
        export MOD_DPATH=$(python -c "import xdoctest, os; print(os.path.dirname(xdoctest.__file__))")
+       export MOD_NAME=xdoctest
        echo "
        ---
        MOD_DPATH = $MOD_DPATH
@@ -349,7 +345,7 @@ jobs:
        running the pytest command inside the workspace
        ---
        "
-       python -m pytest --verbose -p pytester -p no:doctest --xdoctest --cov-config ../pyproject.toml --cov-report term --cov="xdoctest" "$MOD_DPATH" ../tests
+       python -m pytest --verbose -p pytester -p no:doctest --xdoctest --cov-config ../pyproject.toml --cov-report term --durations=100 --cov="$MOD_NAME" "$MOD_DPATH" ../tests
        echo "pytest command finished, moving the coverage file to the repo root"
        ls -al
        # Move coverage file to a new name
@@ -372,26 +368,31 @@ jobs:
        echo '### The cwd should now have a coverage.xml'
        ls -altr
        pwd
-   - uses: codecov/codecov-action@v3
+   - uses: codecov/codecov-action@v4.0.1
      name: Codecov Upload
      with:
        file: ./coverage.xml
+       token: ${{ secrets.CODECOV_TOKEN }}
  test_deploy:
    name: Uploading Test to PyPi
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && ! startsWith(github.event.ref, 'refs/tags') && ! startsWith(github.event.ref, 'refs/heads/release')
    needs:
-   - build_and_test_sdist
    - build_purepy_wheels
-   - test_purepy_wheels
+   - build_and_test_sdist
    steps:
    - name: Checkout source
-     uses: actions/checkout@v4
-   - uses: actions/download-artifact@v3
-     name: Download wheels and sdist
+     uses: actions/checkout@v4.1.1
+   - uses: actions/download-artifact@v2.1.1
+     name: Download wheels
      with:
        name: wheels
        path: wheelhouse
+   - uses: actions/download-artifact@v2.1.1
+     name: Download sdist
+     with:
+       name: sdist_wheels
+       path: wheelhouse
    - name: Show files to upload
      shell: bash
      run: ls -la wheelhouse
@@ -419,23 +420,53 @@ jobs:
        pip install urllib3 requests[security] twine
        GPG_KEYID=$(cat dev/public_gpg_key)
        echo "GPG_KEYID = '$GPG_KEYID'"
-       DO_GPG=True GPG_KEYID=$GPG_KEYID TWINE_REPOSITORY_URL=${TWINE_REPOSITORY_URL} TWINE_PASSWORD=$TWINE_PASSWORD TWINE_USERNAME=$TWINE_USERNAME GPG_EXECUTABLE=$GPG_EXECUTABLE DO_UPLOAD=True DO_TAG=False ./publish.sh
+       GPG_SIGN_CMD="$GPG_EXECUTABLE --batch --yes --detach-sign --armor --local-user $GPG_KEYID"
+       WHEEL_PATHS=(wheelhouse/*.whl wheelhouse/*.tar.gz)
+       WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}")
+       echo "$WHEEL_PATHS_STR"
+       for WHEEL_PATH in "${WHEEL_PATHS[@]}"
+       do
+           echo "------"
+           echo "WHEEL_PATH = $WHEEL_PATH"
+           $GPG_SIGN_CMD --output $WHEEL_PATH.asc $WHEEL_PATH
+           $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH || echo "hack, the first run of gpg verify fails"
+           $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH
+       done
+       ls -la wheelhouse
+       pip install opentimestamps-client
+       ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc
+       ls -la wheelhouse
+       twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; }
+   - uses: actions/upload-artifact@v3.1.3
+     name: Upload deploy artifacts
+     with:
+       name: deploy_artifacts
+       path: |-
+         wheelhouse/*.whl
+         wheelhouse/*.zip
+         wheelhouse/*.tar.gz
+         wheelhouse/*.asc
+         wheelhouse/*.ots
  live_deploy:
    name: Uploading Live to PyPi
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release'))
    needs:
-   - build_and_test_sdist
    - build_purepy_wheels
-   - test_purepy_wheels
+   - build_and_test_sdist
    steps:
    - name: Checkout source
-     uses: actions/checkout@v4
-   - uses: actions/download-artifact@v3
-     name: Download wheels and sdist
+     uses: actions/checkout@v4.1.1
+   - uses: actions/download-artifact@v2.1.1
+     name: Download wheels
      with:
        name: wheels
        path: wheelhouse
+   - uses: actions/download-artifact@v2.1.1
+     name: Download sdist
+     with:
+       name: sdist_wheels
+       path: wheelhouse
    - name: Show files to upload
      shell: bash
      run: ls -la wheelhouse
@@ -463,7 +494,78 @@ jobs:
        pip install urllib3 requests[security] twine
        GPG_KEYID=$(cat dev/public_gpg_key)
        echo "GPG_KEYID = '$GPG_KEYID'"
-       DO_GPG=True GPG_KEYID=$GPG_KEYID TWINE_REPOSITORY_URL=${TWINE_REPOSITORY_URL} TWINE_PASSWORD=$TWINE_PASSWORD TWINE_USERNAME=$TWINE_USERNAME GPG_EXECUTABLE=$GPG_EXECUTABLE DO_UPLOAD=True DO_TAG=False ./publish.sh
+       GPG_SIGN_CMD="$GPG_EXECUTABLE --batch --yes --detach-sign --armor --local-user $GPG_KEYID"
+       WHEEL_PATHS=(wheelhouse/*.whl wheelhouse/*.tar.gz)
+       WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}")
+       echo "$WHEEL_PATHS_STR"
+       for WHEEL_PATH in "${WHEEL_PATHS[@]}"
+       do
+           echo "------"
+           echo "WHEEL_PATH = $WHEEL_PATH"
+           $GPG_SIGN_CMD --output $WHEEL_PATH.asc $WHEEL_PATH
+           $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH || echo "hack, the first run of gpg verify fails"
+           $GPG_EXECUTABLE --verify $WHEEL_PATH.asc $WHEEL_PATH
+       done
+       ls -la wheelhouse
+       pip install opentimestamps-client
+       ots stamp wheelhouse/*.whl wheelhouse/*.tar.gz wheelhouse/*.asc
+       ls -la wheelhouse
+       twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/*.whl wheelhouse/*.tar.gz --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; }
+   - uses: actions/upload-artifact@v3.1.3
+     name: Upload deploy artifacts
+     with:
+       name: deploy_artifacts
+       path: |-
+         wheelhouse/*.whl
+         wheelhouse/*.zip
+         wheelhouse/*.tar.gz
+         wheelhouse/*.asc
+         wheelhouse/*.ots
+  release:
+    name: Create Github Release
+    if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release'))
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+    needs:
+    - live_deploy
+    steps:
+    - name: Checkout source
+      uses: actions/checkout@v4.1.1
+    - uses: actions/download-artifact@v2.1.1
+      name: Download artifacts
+      with:
+        name: deploy_artifacts
+        path: wheelhouse
+    - name: Show files to release
+      shell: bash
+      run: ls -la wheelhouse
+    - run: 'echo "Automatic Release Notes. TODO: improve" > ${{ github.workspace }}-CHANGELOG.txt'
+    - name: Tag Release Commit
+      if: (startsWith(github.event.ref, 'refs/heads/release'))
+      run: |-
+        export VERSION=$(python -c "import setup; print(setup.VERSION)")
+        git tag "v$VERSION"
+        git push origin "v$VERSION"
+    - uses: softprops/action-gh-release@v1
+      name: Create Release
+      id: create_release
+      env:
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      with:
+        body_path: ${{ github.workspace }}-CHANGELOG.txt
+        tag_name: ${{ github.ref }}
+        name: Release ${{ github.ref }}
+        body: Automatic Release
+        generate_release_notes: true
+        draft: true
+        prerelease: false
+        files: |-
+          wheelhouse/*.whl
+          wheelhouse/*.asc
+          wheelhouse/*.ots
+          wheelhouse/*.zip
+          wheelhouse/*.tar.gz
###
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6a7544bb..9dedf443 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,14 @@ We are currently working on porting this changelog to the specifications in
[Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

-## Version 1.1.3 -
+
+## Version 1.1.4 - Unreleased
+
+### Fixed
+* Working around a `modname_to_modpath` issue.
+
+
+## Version 1.1.3 - Released 2024-01-30

### Fixed
* `modname_to_modpath` now handles cases where editable packages have modules where the name is different than the package.
@@ -12,7 +19,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
* Fixed deprecated usage of `ast.Num`


-## Version 1.1.2 - Released 2023-010-25
+## Version 1.1.2 - Released 2023-10-25

### Added
* Partial support for 3.12. New f-string syntax is not supported yet.
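For context on the workflow changes above: the "Install wheel" steps resolve the just-built wheel and query its version with pkginfo before pip-installing it by exact version. A minimal Python sketch of that lookup, assuming a wheelhouse/ directory containing at least one built xdoctest wheel (the variable names simply mirror the workflow's shell exports):

    import pathlib
    from pkginfo import Wheel  # third-party helper the workflow installs via pip

    # Mirror the workflow's glob + sort + take-last wheel selection.
    wheel_fpath = sorted(pathlib.Path('wheelhouse').glob('xdoctest*.whl'))[-1]
    mod_version = Wheel(str(wheel_fpath)).version
    print(f'WHEEL_FPATH={wheel_fpath}')
    print(f'MOD_VERSION={mod_version}')

Pinning xdoctest[extras]==$MOD_VERSION together with -f wheelhouse then makes pip prefer the local, just-built artifact rather than silently resolving to an older release on PyPI.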
diff --git a/dev/setup_secrets.sh b/dev/setup_secrets.sh
index 7b994cae..6321e5ac 100644
--- a/dev/setup_secrets.sh
+++ b/dev/setup_secrets.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
__doc__='
============================
SETUP CI SECRET INSTRUCTIONS
diff --git a/docs/source/conf.py b/docs/source/conf.py
index c9658c6d..bd984742 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -1,7 +1,7 @@
"""
Notes:
    Based on template code in:
-       ~/code/xcookie/xcookie/builders/docs_conf.py
+       ~/code/xcookie/xcookie/builders/docs.py
        ~/code/xcookie/xcookie/rc/conf_ext.py

    http://docs.readthedocs.io/en/latest/getting_started.html
@@ -17,10 +17,13 @@
    # need to edit the conf.py
    cd ~/code/xdoctest/docs
-   sphinx-apidoc --private -f -o ~/code/xdoctest/docs/source ~/code/xdoctest/src/xdoctest --separate
+   sphinx-apidoc --private --separate -f -o ~/code/xdoctest/docs/source/auto ~/code/xdoctest/src/xdoctest '_tokenize.py'
+
+   # Note: the module should be importable before running this
+   # (e.g. install it in developer mode or munge the PYTHONPATH)
    make html
-   git add source/*.rst
+   git add source/auto/*.rst

    Also:
        To turn on PR checks
@@ -34,31 +37,54 @@

    ### For gitlab

-       The user will need to enable the repo on their readthedocs account:
-       https://readthedocs.org/dashboard/import/manual/?
-
-       To enable the read-the-docs go to https://readthedocs.org/dashboard/ and login
-
-       Make sure you have a .readthedocs.yml file
+       The user will need to enable the repo on their readthedocs account:
+       https://readthedocs.org/dashboard/import/manual/?

-       Click import project: (for github you can select, but gitlab you need to import manually)
+       Enter the following information:
            Set the Repository NAME: xdoctest
            Set the Repository URL: https://github.com/Erotemic/xdoctest

+       Make sure you have a .readthedocs.yml file
+
        For gitlab you also need to setup an integrations. Navigate to:

        https://readthedocs.org/dashboard/xdoctest/integrations/create/

        Then add gitlab incoming webhook and copy the URL (make sure
-       you copy the real url and not the text so https is included).
+       you copy the real url and not the text so https is included),
+       specifically:
+
+       In the "Integration type:" dropdown menu, select
+       "Gitlab incoming webhook"
+
+       Click "Add integration"
+
+       Copy the text in the "Webhook URL" box to be used later.
+
+       Copy the text in the "Secret" box to be used later.

        Then go to

        https://github.com/Erotemic/xdoctest/hooks

-       and add the URL
+       Click "Add new webhook".

-       select push, tag, and merge request
+       Copy the text previously saved from the "Webhook URL" box
+       in the readthedocs form into the "URL" box in the gitlab
+       form.
+
+       Copy the text previously saved from the "Secret" box
+       in the readthedocs form into the "Secret token" box in the
+       gitlab form.
+
+       For trigger permissions select the following checkboxes:
+           push events,
+           tag push events,
+           merge request events
+
+       Click the "Add webhook" button.

    See Docs for more details https://docs.readthedocs.io/en/stable/integrations.html
@@ -110,14 +136,19 @@ def visit_Assign(self, node):
    return visitor.version

project = 'xdoctest'
-copyright = '2023, Jon Crall'
+copyright = '2024, Jon Crall'
author = 'Jon Crall'
modname = 'xdoctest'

-modpath = join(dirname(dirname(dirname(__file__))), 'src/xdoctest', '__init__.py')
+repo_dpath = dirname(dirname(dirname(__file__)))
+mod_dpath = join(repo_dpath, 'src/xdoctest')
+src_dpath = dirname(mod_dpath)
+modpath = join(mod_dpath, '__init__.py')
release = parse_version(modpath)
version = '.'.join(release.split('.')[0:2])

+# Hack to ensure the module is importable
+# sys.path.insert(0, os.path.abspath(src_dpath))

# -- General configuration ---------------------------------------------------
@@ -136,8 +167,8 @@ def visit_Assign(self, node):
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
-   # 'myst_parser',  # TODO
-
+   'myst_parser',  # For markdown docs
+   'sphinx.ext.imgconverter',  # For building latexpdf
    'sphinx.ext.githubpages',
    # 'sphinxcontrib.redirects',
    'sphinx_reredirects',
@@ -148,8 +179,21 @@ def visit_Assign(self, node):
napoleon_use_param = False
napoleon_use_ivar = True

+#autoapi_type = 'python'
+#autoapi_dirs = [mod_dpath]
+
autodoc_inherit_docstrings = False

+# Hack for geowatch, todo configure
+autosummary_mock_imports = [
+    'geowatch.utils.lightning_ext._jsonargparse_ext_ge_4_24_and_lt_4_xx',
+    'geowatch.utils.lightning_ext._jsonargparse_ext_ge_4_22_and_lt_4_24',
+    'geowatch.utils.lightning_ext._jsonargparse_ext_ge_4_21_and_lt_4_22',
+    'geowatch.tasks.fusion.datamodules.temporal_sampling.affinity_sampling',
+    'geowatch.tasks.depth_pcd.model',
+    'geowatch.tasks.cold.export_change_map',
+]
+
autodoc_member_order = 'bysource'
autoclass_content = 'both'
# autodoc_mock_imports = ['torch', 'torchvision', 'visdom']
@@ -163,6 +207,9 @@ def visit_Assign(self, node):
# autoapi_dirs = [f'../../src/{modname}']
# autoapi_keep_files = True

+# References:
+# https://stackoverflow.com/questions/21538983/specifying-targets-for-intersphinx-links-to-numpy-scipy-and-matplotlib
+
intersphinx_mapping = {
    # 'pytorch': ('http://pytorch.org/docs/master/', None),
    'python': ('https://docs.python.org/3', None),
@@ -181,10 +228,20 @@ def visit_Assign(self, node):
    'scriptconfig': ('https://scriptconfig.readthedocs.io/en/latest/', None),
    'rich': ('https://rich.readthedocs.io/en/latest/', None),

+   'numpy': ('https://numpy.org/doc/stable/', None),
+   'sympy': ('https://docs.sympy.org/latest/', None),
+   'scikit-learn': ('https://scikit-learn.org/stable/', None),
+   'pandas': ('https://pandas.pydata.org/docs/', None),
+   'matplotlib': ('https://matplotlib.org/stable/', None),
+   'pytest': ('https://docs.pytest.org/en/latest/', None),
+   'platformdirs': ('https://platformdirs.readthedocs.io/en/latest/', None),
+
+   'timerit': ('https://timerit.readthedocs.io/en/latest/', None),
+   'progiter': ('https://progiter.readthedocs.io/en/latest/', None),
+   'dateutil': ('https://dateutil.readthedocs.io/en/latest/', None),
    # 'pytest._pytest.doctest': ('https://docs.pytest.org/en/latest/_modules/_pytest/doctest.html', None),
    # 'colorama': ('https://pypi.org/project/colorama/', None),
-   # 'numpy': ('http://docs.scipy.org/doc/numpy/', None),
    # 'cv2' : ('http://docs.opencv.org/2.4/', None),
    # 'h5py' : ('http://docs.h5py.org/en/latest/', None)
}
@@ -246,6 +303,7 @@ def visit_Assign(self, node):
html_theme_options = {
    'collapse_navigation': False,
    'display_version': True,
+   'navigation_depth': -1,
    # 'logo_only': True,
}
# html_logo = '.static/xdoctest.svg'
@@ -275,6 +333,21 @@ def visit_Assign(self, node):

# -- Options for LaTeX output ------------------------------------------------

+# References:
+# https://tex.stackexchange.com/questions/546246/centos-8-the-font-freeserif-cannot-be-found
+
+"""
+# https://www.sphinx-doc.org/en/master/usage/builders/index.html#sphinx.builders.latex.LaTeXBuilder
+# https://tex.stackexchange.com/a/570691/83399
+sudo apt install fonts-freefont-otf texlive-luatex texlive-latex-extra texlive-fonts-recommended texlive-latex-recommended tex-gyre latexmk
+make latexpdf LATEXMKOPTS="-shell-escape --synctex=-1 -src-specials -interaction=nonstopmode"
+make latexpdf LATEXMKOPTS="-lualatex -interaction=nonstopmode"
+make LATEXMKOPTS="-lualatex -interaction=nonstopmode"
+
+"""
+# latex_engine = 'lualatex'
+# latex_engine = 'xelatex'
+
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
@@ -330,13 +403,24 @@ def visit_Assign(self, node):

from typing import Any, List  # NOQA


+# HACK TO PREVENT EXCESSIVE TIME.
+# TODO: FIXME FOR REAL
+MAX_TIME_MINUTES = None
+if MAX_TIME_MINUTES:
+    import ubelt  # NOQA
+    TIMER = ubelt.Timer()
+    TIMER.tic()
+
+
class PatchedPythonDomain(PythonDomain):
    """
    References:
        https://github.com/sphinx-doc/sphinx/issues/3866
    """
    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
-       # TODO: can use this to resolve references nicely
+       """
+       Helps to resolve cross-references
+       """
        if target.startswith('ub.'):
            target = 'ubelt.' + target[3]
        if target.startswith('xdoc.'):
@@ -353,6 +437,7 @@ class GoogleStyleDocstringProcessor:
    """

    def __init__(self, autobuild=1):
+       self.debug = 0
        self.registry = {}
        if autobuild:
            self._register_builtins()
@@ -407,7 +492,7 @@ def benchmark(lines):
            redone = new_text.split('\n')
            new_lines.extend(redone)
            # import ubelt as ub
-           # print('new_lines = {}'.format(ub.repr2(new_lines, nl=1)))
+           # print('new_lines = {}'.format(ub.urepr(new_lines, nl=1)))
            # new_lines.append('')
            return new_lines

@@ -421,6 +506,17 @@ def text_art(lines):
            new_lines.extend(lines[1:])
            return new_lines

+       # @self.register_section(tag='TODO', alias=['.. todo::'])
+       # def todo_section(lines):
+       #     """
+       #     Fixup todo sections
+       #     """
+       #     import xdev
+       #     xdev.embed()
+       #     import ubelt as ub
+       #     print('lines = {}'.format(ub.urepr(lines, nl=1)))
+       #     return new_lines
+
        @self.register_section(tag='Ignore')
        def ignore(lines):
            return []
@@ -531,10 +627,12 @@ def process_docstring_callback(self, app, what_: str, name: str, obj: Any,
            https://www.sphinx-doc.org/en/1.5.1/_modules/sphinx/ext/autodoc.html
            https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
        """
-       # print(f'name={name}')
+       if self.debug:
+           print(f'ProcessDocstring: name={name}, what_={what_}, num_lines={len(lines)}')
+
        # print('BEFORE:')
        # import ubelt as ub
-       # print('lines = {}'.format(ub.repr2(lines, nl=1)))
+       # print('lines = {}'.format(ub.urepr(lines, nl=1)))

        self.process(lines)

@@ -547,8 +645,12 @@ def process_docstring_callback(self, app, what_: str, name: str, obj: Any,
        # import xdev
        # xdev.embed()

-       RENDER_IMAGES = 0
-       if RENDER_IMAGES:
+       render_doc_images = 0
+
+       if MAX_TIME_MINUTES and TIMER.toc() > (60 * MAX_TIME_MINUTES):
+           render_doc_images = False  # FIXME too slow on RTD
+
+       if render_doc_images:
            # DEVELOPING
            if any('REQUIRES(--show)' in line for line in lines):
                # import xdev
@@ -610,7 +712,7 @@ def process_docstring_callback(self, app, what_: str, name: str, obj: Any,
            lines[edit_slice] = new_lines

        # print('AFTER:')
-       # print('lines = {}'.format(ub.urepr(lines, nl=1)))
+       # print('lines = {}'.format(ub.urepr(lines, nl=1)))

        # if name == 'kwimage.Affine.translate':
        #     import sys
@@ -858,27 +960,74 @@ class Skipped(Exception):
            insert_index = end_index
        else:
            raise KeyError(INSERT_AT)
-       lines.insert(insert_index, '.. image:: {}'.format(rel_to_root_fpath))
+       lines.insert(insert_index, '.. image:: {}'.format('..' / rel_to_root_fpath))
+       # lines.insert(insert_index, '.. image:: {}'.format(rel_to_root_fpath))
        # lines.insert(insert_index, '.. image:: {}'.format(rel_to_static_fpath))
        lines.insert(insert_index, '')


+def postprocess_hyperlinks(app, doctree, docname):
+    """
+    Extension to fixup hyperlinks.
+    This should be connected to the Sphinx application's
+    "doctree-resolved" event.
+    """
+    # Rewrite references that point at .rst sources so they point at the
+    # built .html pages instead.
+    from docutils import nodes
+    import pathlib
+    for node in doctree.traverse(nodes.reference):
+        if 'refuri' in node.attributes:
+            refuri = node.attributes['refuri']
+            if '.rst' in refuri:
+                if 'source' in node.document:
+                    fpath = pathlib.Path(node.document['source'])
+                    parent_dpath = fpath.parent
+                    if (parent_dpath / refuri).exists():
+                        node.attributes['refuri'] = refuri.replace('.rst', '.html')
+                    else:
+                        raise AssertionError
+
+
+def fix_rst_todo_section(lines):
+    # TODO: not implemented yet; placeholder for normalizing rst "todo" sections.
+    new_lines = []
+    for line in lines:
+        ...
+    return new_lines
+
+
def setup(app):
    import sphinx
    app : sphinx.application.Sphinx = app
    app.add_domain(PatchedPythonDomain, override=True)
+
+    app.connect("doctree-resolved", postprocess_hyperlinks)
+
    docstring_processor = GoogleStyleDocstringProcessor()
    # https://stackoverflow.com/questions/26534184/can-sphinx-ignore-certain-tags-in-python-docstrings
    app.connect('autodoc-process-docstring', docstring_processor.process_docstring_callback)

+    def copy(src, dst):
+        import shutil
+        print(f'Copy {src} -> {dst}')
+        assert src.exists()
+        if not dst.parent.exists():
+            dst.parent.mkdir()
+        shutil.copy(src, dst)
+
    ### Hack for kwcoco: TODO: figure out a way for the user to configure this.
    HACK_FOR_KWCOCO = 0
    if HACK_FOR_KWCOCO:
        import pathlib
-       import shutil
-       doc_outdir = pathlib.Path(app.outdir)
-       doc_srcdir = pathlib.Path(app.srcdir)
-       schema_src = (doc_srcdir / '../../kwcoco/coco_schema.json')
-       shutil.copy(schema_src, doc_outdir / 'coco_schema.json')
-       shutil.copy(schema_src, doc_srcdir / 'coco_schema.json')
+       doc_outdir = pathlib.Path(app.outdir) / 'auto'
+       doc_srcdir = pathlib.Path(app.srcdir) / 'auto'
+
+       mod_dpath = doc_srcdir / '../../../kwcoco'
+
+       src_fpath = (mod_dpath / 'coco_schema.json')
+       copy(src_fpath, doc_outdir / src_fpath.name)
+       copy(src_fpath, doc_srcdir / src_fpath.name)
+
+       src_fpath = (mod_dpath / 'coco_schema_informal.rst')
+       copy(src_fpath, doc_outdir / src_fpath.name)
+       copy(src_fpath, doc_srcdir / src_fpath.name)
    return app
diff --git a/publish.sh b/publish.sh
index 6a1d9c67..237ee0d8 100755
--- a/publish.sh
+++ b/publish.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
__doc__='
Script to publish a new version of this library on PyPI.
@@ -24,6 +24,10 @@ Args:
        If True, sign the packages with a GPG key specified by `GPG_KEYID`.
        defaults to auto.

+   DO_OTS (bool) :
+       If True, make an opentimestamp for the package and signature (if
+       available)
+
    DO_UPLOAD (bool) :
        If True, upload the packages to the pypi server specified by
        `TWINE_REPOSITORY_URL`.
@@ -138,11 +142,21 @@
DO_UPLOAD=${DO_UPLOAD:=$ARG_1}
DO_TAG=${DO_TAG:=$ARG_1}

DO_GPG=${DO_GPG:="auto"}
-# Verify that we want to build
if [ "$DO_GPG" == "auto" ]; then
    DO_GPG="True"
fi

+DO_OTS=${DO_OTS:="auto"}
+if [ "$DO_OTS" == "auto" ]; then
+    # Do opentimestamp if it is available
+    # python -m pip install opentimestamps-client
+    if type ots ; then
+        DO_OTS="True"
+    else
+        DO_OTS="False"
+    fi
+fi
+
DO_BUILD=${DO_BUILD:="auto"}
# Verify that we want to build
if [ "$DO_BUILD" == "auto" ]; then
@@ -150,6 +164,7 @@ if [ "$DO_BUILD" == "auto" ]; then
fi

DO_GPG=$(normalize_boolean "$DO_GPG")
+DO_OTS=$(normalize_boolean "$DO_OTS")
DO_BUILD=$(normalize_boolean "$DO_BUILD")
DO_UPLOAD=$(normalize_boolean "$DO_UPLOAD")
DO_TAG=$(normalize_boolean "$DO_TAG")
@@ -237,6 +252,7 @@ GPG_KEYID = '$GPG_KEYID'
DO_UPLOAD=${DO_UPLOAD}
DO_TAG=${DO_TAG}
DO_GPG=${DO_GPG}
+DO_OTS=${DO_OTS}
DO_BUILD=${DO_BUILD}
MODE_LIST_STR=${MODE_LIST_STR}
"
@@ -375,7 +391,7 @@ ls_array(){
}


-WHEEL_PATHS=()
+WHEEL_FPATHS=()
for _MODE in "${MODE_LIST[@]}"
do
    if [[ "$_MODE" == "sdist" ]]; then
@@ -393,32 +409,32 @@ do
    for new_item in "${_NEW_WHEEL_PATHS[@]}"
    do
        if [[ "$new_item" != "" ]]; then
-           WHEEL_PATHS+=("$new_item")
+           WHEEL_FPATHS+=("$new_item")
        fi
    done
done

# Dedup the paths
-readarray -t WHEEL_PATHS < <(printf '%s\n' "${WHEEL_PATHS[@]}" | sort -u)
+readarray -t WHEEL_FPATHS < <(printf '%s\n' "${WHEEL_FPATHS[@]}" | sort -u)

-WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_PATHS[@]}")
+WHEEL_PATHS_STR=$(printf '"%s" ' "${WHEEL_FPATHS[@]}")
echo "WHEEL_PATHS_STR = $WHEEL_PATHS_STR"

echo "
MODE=$MODE
VERSION='$VERSION'
-WHEEL_PATHS='$WHEEL_PATHS_STR'
+WHEEL_FPATHS='$WHEEL_PATHS_STR'
"

-
+WHEEL_SIGNATURE_FPATHS=()
if [ "$DO_GPG" == "True" ]; then

    echo "
    === ===
    "
-   for WHEEL_FPATH in "${WHEEL_PATHS[@]}"
+   for WHEEL_FPATH in "${WHEEL_FPATHS[@]}"
    do
        echo "WHEEL_FPATH = $WHEEL_FPATH"
        check_variable WHEEL_FPATH
@@ -439,6 +455,8 @@ if [ "$DO_GPG" == "True" ]; then
        echo "Verifying wheels"
        $GPG_EXECUTABLE --verify "$WHEEL_FPATH".asc "$WHEEL_FPATH" || { echo 'could not verify wheels' ; exit 1; }
+
+       WHEEL_SIGNATURE_FPATHS+=("$WHEEL_FPATH".asc)
    done
    echo "
    === ===
@@ -448,6 +466,27 @@ else
fi
+
+
+if [ "$DO_OTS" == "True" ]; then
+
+    echo "
+    === ===
+    "
+ if [ "$DO_GPG" == "True" ]; then + # Stamp the wheels and the signatures + ots stamp "${WHEEL_FPATHS[@]}" "${WHEEL_SIGNATURE_FPATHS[@]}" + else + # Stamp only the wheels + ots stamp "${WHEEL_FPATHS[@]}" + fi + echo " + === === + " +else + echo "DO_OTS=False, Skipping OTS sign" +fi + + if [[ "$DO_TAG" == "True" ]]; then TAG_NAME="v${VERSION}" # if we messed up we can delete the tag @@ -467,7 +506,7 @@ if [[ "$DO_UPLOAD" == "True" ]]; then check_variable TWINE_USERNAME check_variable TWINE_PASSWORD "hide" - for WHEEL_FPATH in "${WHEEL_PATHS[@]}" + for WHEEL_FPATH in "${WHEEL_FPATHS[@]}" do twine upload --username "$TWINE_USERNAME" "--password=$TWINE_PASSWORD" \ --repository-url "$TWINE_REPOSITORY_URL" \ @@ -496,3 +535,39 @@ else !!! FINISH: DRY RUN !!! """ fi + +__devel__=' +# Checking to see how easy it is to upload packages to gitlab. +# This logic should go in the CI script, not sure if it belongs here. + + +export HOST=https://gitlab.kitware.com +export GROUP_NAME=computer-vision +export PROJECT_NAME=geowatch +PROJECT_VERSION=$(geowatch --version) +echo "$PROJECT_VERSION" + +load_secrets +export PRIVATE_GITLAB_TOKEN=$(git_token_for "$HOST") +TMP_DIR=$(mktemp -d -t ci-XXXXXXXXXX) + +curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups" > "$TMP_DIR/all_group_info" +GROUP_ID=$(cat "$TMP_DIR/all_group_info" | jq ". | map(select(.name==\"$GROUP_NAME\")) | .[0].id") +echo "GROUP_ID = $GROUP_ID" + +curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" "$HOST/api/v4/groups/$GROUP_ID" > "$TMP_DIR/group_info" +PROJ_ID=$(cat "$TMP_DIR/group_info" | jq ".projects | map(select(.name==\"$PROJECT_NAME\")) | .[0].id") +echo "PROJ_ID = $PROJ_ID" + +ls_array DIST_FPATHS "dist/*" + +for FPATH in "${DIST_FPATHS[@]}" +do + FNAME=$(basename $FPATH) + echo $FNAME + curl --header "PRIVATE-TOKEN: $PRIVATE_GITLAB_TOKEN" \ + --upload-file $FPATH \ + "https://gitlab.kitware.com/api/v4/projects/$PROJ_ID/packages/generic/$PROJECT_NAME/$PROJECT_VERSION/$FNAME" +done + +' diff --git a/src/xdoctest/__init__.py b/src/xdoctest/__init__.py index 0a76bd78..00372fd9 100644 --- a/src/xdoctest/__init__.py +++ b/src/xdoctest/__init__.py @@ -313,7 +313,7 @@ def fib(n): mkinit xdoctest --nomods ''' -__version__ = '1.1.3' +__version__ = '1.1.4' # Expose only select submodules diff --git a/src/xdoctest/utils/util_import.py b/src/xdoctest/utils/util_import.py index b81f1419..10c99a5b 100644 --- a/src/xdoctest/utils/util_import.py +++ b/src/xdoctest/utils/util_import.py @@ -62,34 +62,6 @@ def _importlib_import_modpath(modpath): # nocover return module -def _pkgutil_modname_to_modpath(modname): # nocover - """ - faster version of :func:`_syspath_modname_to_modpath` using builtin python - mechanisms, but unfortunately it doesn't play nice with pytest. - - Args: - modname (str): the module name. - - Example: - >>> # xdoctest: +SKIP - >>> modname = 'xdoctest.static_analysis' - >>> _pkgutil_modname_to_modpath(modname) - ...static_analysis.py - >>> # xdoctest: +REQUIRES(CPython) - >>> _pkgutil_modname_to_modpath('_ctypes') - ..._ctypes... - - Ignore: - >>> _pkgutil_modname_to_modpath('cv2') - """ - import pkgutil - loader = pkgutil.find_loader(modname) - if loader is None: - raise Exception('No module named {} in the PYTHONPATH'.format(modname)) - modpath = loader.get_filename().replace('.pyc', '.py') - return modpath - - class PythonPathContext(object): """ Context for temporarily adding a dir to the PYTHONPATH. 
@@ -320,8 +292,8 @@ def import_module_from_path(modpath, index=-1):
        >>> assert module.testvar == 1

    Example:
-       >>> import pytest
        >>> # xdoctest: +SKIP("ubelt dependency")
+       >>> import pytest
        >>> with pytest.raises(IOError):
        >>>     ub.import_module_from_path('does-not-exist')
        >>> with pytest.raises(IOError):
@@ -704,17 +676,21 @@ def check_dpath(dpath):
            # break with pytest anymore? Nope, pytest still doesn't work right
            # with it.
            for finder_fpath in new_editable_finder_paths:
-               mapping = _static_parse('MAPPING', finder_fpath)
                try:
-                   target = dirname(mapping[_pkg_name])
-               except KeyError:
+                   mapping = _static_parse('MAPPING', finder_fpath)
+               except AttributeError:
                    ...
                else:
-                   if not exclude or normalize(target) not in real_exclude:  # pragma: nobranch
-                       modpath = check_dpath(target)
-                       if modpath:  # pragma: nobranch
-                           found_modpath = modpath
-                           break
+                   try:
+                       target = dirname(mapping[_pkg_name])
+                   except KeyError:
+                       ...
+                   else:
+                       if not exclude or normalize(target) not in real_exclude:  # pragma: nobranch
+                           modpath = check_dpath(target)
+                           if modpath:  # pragma: nobranch
+                               found_modpath = modpath
+                               break
            if found_modpath is not None:
                break
@@ -767,6 +743,51 @@ def check_dpath(dpath):
    return found_modpath


+def _importlib_modname_to_modpath(modname):  # nocover
+    import importlib.util
+    spec = importlib.util.find_spec(modname)
+    print(f'spec={spec}')
+    modpath = spec.origin.replace('.pyc', '.py')
+    return modpath
+
+
+def _pkgutil_modname_to_modpath(modname):  # nocover
+    """
+    faster version of :func:`_syspath_modname_to_modpath` using builtin python
+    mechanisms, but unfortunately it doesn't play nice with pytest.
+
+    Note:
+        pkgutil.find_loader is deprecated in 3.12 and removed in 3.14
+
+    Args:
+        modname (str): the module name.
+
+    Example:
+        >>> # xdoctest: +SKIP
+        >>> modname = 'xdoctest.static_analysis'
+        >>> _pkgutil_modname_to_modpath(modname)
+        ...static_analysis.py
+        >>> # xdoctest: +REQUIRES(CPython)
+        >>> _pkgutil_modname_to_modpath('_ctypes')
+        ..._ctypes...
+
+    Ignore:
+        >>> _pkgutil_modname_to_modpath('cv2')
+    """
+    import pkgutil
+    loader = pkgutil.find_loader(modname)
+    if loader is None:
+        raise Exception('No module named {} in the PYTHONPATH'.format(modname))
+    try:
+        modpath = loader.get_filename().replace('.pyc', '.py')
+    except Exception:
+        print('Issue in _pkgutil_modname_to_modpath')
+        print(f'loader = {loader!r}')
+        print(f'modname = {modname!r}')
+        raise
+    return modpath
+
+
def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None):
    """
    Finds the path to a python module from its name.
@@ -808,7 +829,18 @@ def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None):
        >>> modpath = basename(modname_to_modpath('_ctypes'))
        >>> assert 'ctypes' in modpath
    """
-   modpath = _syspath_modname_to_modpath(modname, sys_path)
+   if hide_main or sys_path:
+       modpath = _syspath_modname_to_modpath(modname, sys_path)
+   else:
+       # import xdev
+       # with xdev.embed_on_exception_context:
+       # try:
+       #     modpath = _importlib_modname_to_modpath(modname)
+       # except Exception:
+       #     modpath = _syspath_modname_to_modpath(modname, sys_path)
+       # modpath = _pkgutil_modname_to_modpath(modname, sys_path)
+       modpath = _syspath_modname_to_modpath(modname, sys_path)
+
    if modpath is None:
        return None
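The modname_to_modpath change above keeps _syspath_modname_to_modpath as the only active resolver and leaves the importlib-based variant commented out. For reference, a minimal sketch of what _importlib_modname_to_modpath does, plus the None-handling a caller would likely need (importlib.util.find_spec returns None for a missing top-level module, and spec.origin can be None for namespace packages); the function name here is hypothetical:

    import importlib.util

    def modname_to_modpath_sketch(modname):
        # Resolve a module name to its file path without executing the module.
        spec = importlib.util.find_spec(modname)
        if spec is None or spec.origin is None:
            return None
        # Same stale-bytecode normalization the diff applies.
        return spec.origin.replace('.pyc', '.py')

    print(modname_to_modpath_sketch('json'))  # e.g. .../lib/python3.12/json/__init__.py

One caveat: for dotted names, find_spec imports the parent packages, so unlike _syspath_modname_to_modpath it is not a purely static lookup.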