Updates installation instructions and CONTRIBUTING #845

Workflow file for this run

name: build
on:
workflow_dispatch:
schedule:
- cron: 0 0 * * 0 # weekly
pull_request:
branches:
- main
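# the cron expression above (0 0 * * 0) fires every Sunday at 00:00 UTC, in
# addition to manual runs (workflow_dispatch) and pull requests targeting main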
jobs:
# based on https://slashgear.github.io/how-to-split-test-by-folder-with-github-action/
get_notebooks:
runs-on: ubuntu-latest
outputs:
notebook: ${{ steps.get-notebooks.outputs.nb }}
steps:
- uses: actions/checkout@v3
- id: get-notebooks
run: "echo \"nb=$(ls examples/*ipynb | jq -R -s -c 'split(\"\\n\")[:-1]')\"\
\ >> $GITHUB_OUTPUT\n"
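# jq reads the newline-separated `ls` output as one raw string (-R -s), splits
# it on newlines, drops the trailing empty entry, and emits a compact JSON
# array of paths, e.g. ["examples/04_Perceptual_distance.ipynb",
# "examples/Demo_Eigendistortion.ipynb"]; the jobs below read it back with fromJson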
notebooks:
runs-on: ubuntu-latest
needs: [get_notebooks]
strategy:
matrix:
python-version: [3.7, 3.8, 3.9, '3.10']
notebook: ${{fromJson(needs.get_notebooks.outputs.notebook)}}
fail-fast: false
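# the matrix runs every python-version x notebook combination; fail-fast: false
# lets the remaining combinations keep running when one of them fails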
name: Execute notebooks
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: pip
cache-dependency-path: setup.py
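# setup-python's pip cache is keyed on setup.py, so it is invalidated whenever
# the declared dependencies change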
- name: Setup FFmpeg
uses: FedericoCarboni/setup-ffmpeg@v2
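# FFmpeg is assumed to be needed by notebooks that render or save matplotlib
# animations; without it those cells would fail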
- name: Install dependencies
run: |
pip install --upgrade --upgrade-strategy eager .[dev,nb]
- name: Download TID2013 dataset
if: ${{ matrix.notebook == 'examples/04_Perceptual_distance.ipynb' }}
run: |
mkdir -p data
wget https://osf.io/7nfkz/download -O ./data/tid2013.rar
7z x ./data/tid2013.rar -o./data/tid2013/
- name: Run notebooks
if: ${{ !contains(fromJSON('["examples/Demo_Eigendistortion.ipynb", "examples/Metamer-Portilla-Simoncelli.ipynb"]'), matrix.notebook) }}
run: jupyter execute ${{ matrix.notebook }} --kernel_name=python3
- name: Run notebooks
if: ${{ matrix.notebook == 'examples/Metamer-Portilla-Simoncelli.ipynb' }}
# this notebook takes much longer than the rest (if run to completion,
# ~2hr on a laptop). We use papermill's parameters to reduce the max
# number of steps for metamer synthesis here (we want to test that each
# cell runs, but we don't need synthesis to go to completion). We also
# download the images required for this notebook.
run: |
mkdir -p data
wget https://osf.io/eqr3t/download -O ./data/portilla_simoncelli_images.tar.gz
tar xfz ./data/portilla_simoncelli_images.tar.gz --directory=./data/
pip install --upgrade --upgrade-strategy eager papermill
papermill ${{ matrix.notebook }} ${{ matrix.notebook }}_output.ipynb -p short_synth_max_iter 10 -p long_synth_max_iter 10 -p longest_synth_max_iter 10 -k python3 --cwd examples/
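# note: papermill injects the -p values in a new cell placed after the cell
# tagged "parameters"; this notebook is assumed to define short_synth_max_iter,
# long_synth_max_iter and longest_synth_max_iter in such a cell so the injected
# values take precedence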
- name: Run notebooks
if: ${{ matrix.notebook == 'examples/Demo_Eigendistortion.ipynb' }}
# this notebook takes much longer than the rest (if run to completion,
# ~1hr on a laptop, more than 5 hours on the Github runners). We use
# papermill's parameters to reduce the max number of iters for
# eigendistortion synthesis here (we want to test that each cell runs,
# but we don't need synthesis to go to completion)
run: |
pip install --upgrade --upgrade-strategy eager papermill
papermill ${{ matrix.notebook }} ${{ matrix.notebook }}_output.ipynb -p max_iter_frontend 10 -p max_iter_vgg 10 -k python3 --cwd examples/
tests:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.7, 3.8, 3.9, '3.10']
fail-fast: false
name: Run pytest scripts
steps:
- uses: actions/checkout@v3
- name: Install Python 3
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: pip
cache-dependency-path: setup.py
- name: Install dependencies
run: |
# using the --upgrade and --upgrade-strategy eager flags ensures that
# pip will always install the latest allowed version of all
# dependencies, to make sure the cache doesn't go stale
pip install --upgrade --upgrade-strategy eager .[dev]
- name: Run tests with pytest
run: |
pytest -n auto --cov-report xml
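# -n auto distributes tests across all available cores via pytest-xdist, and
# --cov-report xml writes coverage.xml for the Codecov upload below (both
# plugins are assumed to be pulled in by the dev extra)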
- name: Upload coverage to Codecov
uses: codecov/codecov-action@858dd794fbb81941b6d60b0dca860878cba60fa9 # v3.1.1
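# the action is pinned to a full commit SHA rather than a mutable tag, so the
# code it runs cannot change underneath us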
all_tutorials_in_docs:
runs-on: ubuntu-latest
name: Check that all tutorial notebooks are included in docs
needs: [get_notebooks]
strategy:
matrix:
notebook: ${{fromJson(needs.get_notebooks.outputs.notebook)}}
steps:
- uses: actions/checkout@v3
- name: Check for file
shell: bash
# there are two levels that the notebooks can be in
run: |
if [[ -z "$(grep ${{ matrix.notebook }} docs/tutorials/*nblink)" && -z "$(grep ${{ matrix.notebook }} docs/tutorials/*/*nblink)" ]]; then
exit 1
fi
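# an nblink file is the small JSON stub nbsphinx-link uses to pull a notebook
# into the docs, roughly (illustration only):
# {"path": "../../examples/04_Perceptual_distance.ipynb"}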
no_extra_nblinks:
runs-on: ubuntu-latest
name: Check that we don't have any extra nblink files
steps:
- uses: actions/checkout@v3
- name: Check same number of nblink and notebooks
shell: bash
run: |
n_nblink=0; for file in docs/tutorials/*nblink; do let "n_nblink+=1"; done;
for file in docs/tutorials/*/*nblink; do let "n_nblink+=1"; done;
n_ipynb=0; for file in examples/*ipynb; do let "n_ipynb+=1"; done;
if [[ $n_nblink != $n_ipynb ]]; then exit 1; fi;
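# the two counts must match exactly: together with the job above, this enforces
# a one-to-one pairing between notebooks in examples/ and nblink files in docs/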
check_urls:
runs-on: ubuntu-latest
name: Check all urls are valid
steps:
- uses: actions/checkout@v3
- uses: urlstechie/urlchecker-action@0.0.34
with:
file_types: .md,.py,.rst,.ipynb
print_all: false
timeout: 5
retry_count: 3
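# each URL request times out after 5 (presumably seconds) and is retried up to
# 3 times before being reported as broken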
check:
if: always()
needs:
- notebooks
- tests
- all_tutorials_in_docs
- no_extra_nblinks
runs-on: ubuntu-latest
steps:
- name: Decide whether all tests and notebooks succeeded
uses: re-actors/alls-green@afee1c1eac2a506084c274e9c02c8e0687b48d9e # v1.2.2
with:
jobs: ${{ toJSON(needs) }}
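# this job is the single status check to mark as required in branch protection:
# alls-green fails if any job listed under needs failed or was skipped, and
# if: always() ensures it runs even when those jobs fail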