Merge pull request #95 from mboudet/master #276

Workflow file for this run

name: Galaxy Tool Linting and Tests for push and PR
on: [push, pull_request]
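# GALAXY_FORK/GALAXY_BRANCH pin the Galaxy version that Planemo lints and
# tests against, MAX_CHUNKS caps the number of parallel test jobs, and
# MAX_FILE_SIZE is the limit enforced by the file_sizes job below.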
env:
GALAXY_FORK: galaxyproject
GALAXY_BRANCH: release_23.0
MAX_CHUNKS: 4
MAX_FILE_SIZE: 1M
concurrency:
# Group runs by PR, but keep runs on the default branch separate
# because we do not want to cancel ToolShed uploads
group: pr-${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main') && github.run_number || github.ref }}
cancel-in-progress: true
jobs:
# the setup job does two things:
# 1. cache the pip cache and .planemo
# 2. determine the commit range, the changed repositories/tools and the
# test chunks
# these values, together with the latest SHA of the chosen branch of the
# Galaxy repo, are exposed as job outputs which subsequent jobs consume
setup:
name: Setup cache and determine changed repositories
runs-on: ubuntu-latest
outputs:
galaxy-head-sha: ${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
repository-list: ${{ steps.discover.outputs.repository-list }}
tool-list: ${{ steps.discover.outputs.tool-list }}
chunk-count: ${{ steps.discover.outputs.chunk-count }}
chunk-list: ${{ steps.discover.outputs.chunk-list }}
commit-range: ${{ steps.discover.outputs.commit-range }}
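# Downstream jobs consume these values via needs.setup.outputs.<name>,
# both in their `if:` conditions and as action inputs.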
strategy:
matrix:
python-version: ['3.7']
steps:
- name: Print github context properties
run: |
echo 'event: ${{ github.event_name }}'
echo 'sha: ${{ github.sha }}'
echo 'ref: ${{ github.ref }}'
echo 'head_ref: ${{ github.head_ref }}'
echo 'base_ref: ${{ github.base_ref }}'
echo 'event.before: ${{ github.event.before }}'
echo 'event.after: ${{ github.event.after }}'
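# This SHA is embedded in the pip and .planemo cache keys below, so both
# caches are rebuilt whenever the pinned Galaxy branch advances.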
- name: Determine latest commit in the Galaxy repo
id: get-galaxy-sha
run: echo "galaxy-head-sha=$(git ls-remote https://github.com/${{ env.GALAXY_FORK }}/galaxy refs/heads/${{ env.GALAXY_BRANCH }} | cut -f1)" >> $GITHUB_OUTPUT
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v3
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
- name: Cache .planemo
uses: actions/cache@v3
id: cache-planemo
with:
path: ~/.planemo
key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
# Install the `wheel` package so that, for packages not distributed as
# wheels, pip builds a wheel which can then be cached.
- name: Install wheel
run: pip install wheel
- name: Install flake8
run: pip install flake8 flake8-import-order
- uses: actions/checkout@v3
with:
fetch-depth: 0
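# create-cache is only requested when the pip or .planemo cache missed,
# so a warm cache avoids re-priming it.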
- name: Fake a Planemo run to update cache and determine commit range, repositories, and chunks
uses: galaxyproject/planemo-ci-action@v1
id: discover
with:
create-cache: ${{ steps.cache-pip.outputs.cache-hit != 'true' || steps.cache-planemo.outputs.cache-hit != 'true' }}
galaxy-fork: ${{ env.GALAXY_FORK }}
galaxy-branch: ${{ env.GALAXY_BRANCH }}
max-chunks: ${{ env.MAX_CHUNKS }}
python-version: ${{ matrix.python-version }}
- name: Install GMOD python modules
run: pip install tripal==3.2.1 biopython bcbio-gff chado==2.3.9 apollo==4.2.13
- name: Show commit range
run: echo '${{ steps.discover.outputs.commit-range }}'
- name: Show repository list
run: echo '${{ steps.discover.outputs.repository-list }}'
- name: Show tool list
run: echo '${{ steps.discover.outputs.tool-list }}'
- name: Show chunks
run: |
echo 'Using ${{ steps.discover.outputs.chunk-count }} chunks (${{ steps.discover.outputs.chunk-list }})'
# Planemo lint the changed repositories
lint:
name: Lint tool-list
needs: setup
if: ${{ needs.setup.outputs.repository-list != '' || needs.setup.outputs.tool-list != '' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.7']
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v3
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
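# Lints the changed repositories/tools; on failure the report written to
# lint_report.txt is uploaded as an artifact.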
- name: Planemo lint
uses: galaxyproject/planemo-ci-action@v1
id: lint
with:
mode: lint
repository-list: ${{ needs.setup.outputs.repository-list }}
tool-list: ${{ needs.setup.outputs.tool-list }}
- uses: actions/upload-artifact@v3
if: ${{ failure() }}
with:
name: 'Tool linting output'
path: lint_report.txt
# flake8 of Python scripts in the changed repositories
flake8:
name: Lint Python scripts
needs: setup
if: ${{ needs.setup.outputs.repository-list != '' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ['3.7']
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v3
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
- name: Install flake8
run: pip install flake8 flake8-import-order
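# The newline-separated repository list is fed to flake8 via xargs; E501
# (line length) and E117 (over-indentation) are ignored, and --tee writes
# the report both to stdout and to pylint_report.txt.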
- name: Flake8
run: echo '${{ needs.setup.outputs.repository-list }}' | xargs -d '\n' flake8 --ignore=E501,E117 --output-file pylint_report.txt --tee
- uses: actions/upload-artifact@v3
if: ${{ failure() }}
with:
name: 'Python linting output'
path: pylint_report.txt
lintr:
name: Lint R scripts
needs: setup
if: ${{ needs.setup.outputs.repository-list != '' }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04]
r-version: ['release']
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: r-lib/actions/setup-r@v2
with:
r-version: ${{ matrix.r-version }}
- name: Cache R packages
uses: actions/cache@v3
with:
path: ${{ env.R_LIBS_USER }}
key: r_cache_${{ matrix.os }}_${{ matrix.r-version }}
- name: Install non-R lintr dependencies
run: sudo apt-get install libcurl4-openssl-dev
- name: Install lintr
run: |
install.packages('remotes')
remotes::install_cran("lintr")
shell: Rscript {0}
- name: Save repositories to file
run: echo '${{ needs.setup.outputs.repository-list }}' > repository_list.txt
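# The R script below reads the repository list one path per line, runs
# lint_dir() with a few linters disabled, writes any findings to stderr
# and rlint_report.txt, and exits non-zero if anything was flagged.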
- name: lintr
run: |
library(lintr)
linters <- linters_with_defaults(line_length_linter = NULL, cyclocomp_linter = NULL, object_usage_linter = NULL)
con <- file("repository_list.txt", "r")
status <- 0
while (TRUE) {
repo <- readLines(con, n = 1)
if (length(repo) == 0) {
break
}
lnt <- lint_dir(repo, relative_path = TRUE, linters = linters)
if (length(lnt) > 0) {
status <- 1
for (l in lnt) {
rel_path <- paste(repo, l$filename, sep = "/")
msg <- paste(paste(rel_path, l$line_number, l$column_number, sep = ":"), l$message, paste("(", l$line, ")"))
write(msg, stderr())
write(msg, "rlint_report.txt", append = TRUE)
}
}
}
quit(status = status)
shell: Rscript {0}
- uses: actions/upload-artifact@v3
if: ${{ failure() }}
with:
name: 'R linting output'
path: rlint_report.txt
file_sizes:
name: Check file sizes
needs: setup
if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
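# List the files touched in the commit range and flag any that exceed
# MAX_FILE_SIZE; the job fails and uploads the report if any are found.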
- name: Check file sizes
run: |
touch file_size_report.txt
git diff --diff-filter=d --name-only ${{ needs.setup.outputs.commit-range }} > git.diff
while read -r line; do
find "$line" -type f -size +${{ env.MAX_FILE_SIZE }} >> file_size_report.txt
done < git.diff
if [[ -s file_size_report.txt ]]; then
echo "Files larger than ${{ env.MAX_FILE_SIZE }} found"
cat file_size_report.txt
exit 1
fi
- uses: actions/upload-artifact@v3
if: ${{ failure() }}
with:
name: 'File size report'
path: file_size_report.txt
# Planemo test the changed repositories; each chunk creates an artifact
# containing HTML and JSON reports for the executed tests
test:
name: Test tools
needs: setup
if: ${{ needs.setup.outputs.repository-list != '' }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
chunk: ${{ fromJson(needs.setup.outputs.chunk-list) }}
python-version: ['3.8']
apollo: [0, 1]
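# Each chunk runs twice (matrix.apollo = 0 and 1, fed to APOLLO_REMOTE in
# the Apollo-related steps), and a PostgreSQL 11 service container is
# exposed on port 5432 (presumably used as the database for the Galaxy
# instance started during testing).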
services:
postgres:
image: postgres:11
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
ports:
- 5432:5432
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v3
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
- name: Cache .planemo
uses: actions/cache@v3
id: cache-planemo
with:
path: ~/.planemo
key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
- name: Get number of CPU cores
uses: SimenB/github-actions-cpu-cores@v1
id: cpu-cores
- name: Install GMOD python modules
run: pip install tripal==3.2.1 biopython bcbio-gff chado==2.3.9 apollo==4.2.13
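# Only runs when Apollo tools changed; bootstrap_apollo.sh presumably
# starts the `apollo` Docker container whose logs are dumped after the
# tests.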
- name: Bootstrap Apollo
if: ${{ contains(needs.setup.outputs.repository-list, 'tools/apollo') }}
env:
APOLLO_REMOTE: "${{ matrix.apollo }}"
BLAT_ENABLED: "1"
ARROW_GLOBAL_CONFIG_PATH: "${{ github.workspace }}/tools/apollo/test-data/arrow.yml"
GALAXY_SHARED_DIR: "${{ github.workspace }}/apollo_shared_dir"
run: ./bootstrap_apollo.sh
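# envsubst expands the APOLLO_* / GALAXY_SHARED_DIR variables in
# job_conf_apollo.xml into job_conf_apollo_subst.xml, which is passed to
# Planemo below via --job_config_file.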
- name: Prepare specific job_conf for Apollo tools
if: ${{ contains(needs.setup.outputs.repository-list, 'tools/apollo') }}
uses: danielr1996/envsubst-action@1.0.0
env:
APOLLO_REMOTE: "${{ matrix.apollo }}"
BLAT_ENABLED: "1"
ARROW_GLOBAL_CONFIG_PATH: "${{ runner.workspace }}/galaxy-tools/tools/apollo/test-data/arrow.yml"
GALAXY_SHARED_DIR: "${{ runner.workspace }}/galaxy-tools/apollo_shared_dir"
DEFAULT_MOUNTS: "$defaults"
with:
input: "${{ github.workspace }}/job_conf_apollo.xml"
output: "${{ github.workspace }}/job_conf_apollo_subst.xml"
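# Runs the tool tests for this matrix chunk against the pinned Galaxy
# fork/branch, using all available CPU cores as Galaxy job slots.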
- name: Planemo test
uses: galaxyproject/planemo-ci-action@v1
id: test
env:
APOLLO_REMOTE: "${{ matrix.apollo }}"
BLAT_ENABLED: "1"
ARROW_GLOBAL_CONFIG_PATH: "${{ github.workspace }}/tools/apollo/test-data/arrow.yml"
GALAXY_SHARED_DIR: "${{ github.workspace }}/apollo_shared_dir"
with:
mode: test
repository-list: ${{ needs.setup.outputs.repository-list }}
galaxy-fork: ${{ env.GALAXY_FORK }}
galaxy-branch: ${{ env.GALAXY_BRANCH }}
chunk: ${{ matrix.chunk }}
chunk-count: ${{ needs.setup.outputs.chunk-count }}
galaxy-slots: ${{ steps.cpu-cores.outputs.count }}
# Limit each test to 15 minutes
test_timeout: 900
additional-planemo-options: "${{ contains(needs.setup.outputs.repository-list, 'tools/apollo') && format('--job_config_file {0}/job_conf_apollo_subst.xml', github.workspace) || '' }}"
- name: Show Apollo logs
if: ${{ contains(needs.setup.outputs.repository-list, 'tools/apollo') }}
run: docker logs apollo
- uses: actions/upload-artifact@v3
with:
name: 'Tool test output ${{ matrix.chunk }}'
path: upload
# - combine the results of the test chunks (the test job itself does not
# fail on tool test errors) and create a global test report as JSON and
# HTML, which is provided as an artifact
# - check if any tool test actually failed (by looking it up in the
# combined JSON) and fail this step if that is the case
combine_outputs:
name: Combine chunked test results
needs: [setup, test]
if: ${{ needs.setup.outputs.repository-list != '' }}
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.7']
steps:
- uses: actions/download-artifact@v3
with:
path: artifacts
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v3
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
- name: Install jq
run: sudo apt-get install jq
- name: Combine outputs
uses: galaxyproject/planemo-ci-action@v1
id: combine
with:
mode: combine
html-report: true
- uses: actions/upload-artifact@v3
with:
name: 'All tool test results'
path: upload
- name: Check outputs
uses: galaxyproject/planemo-ci-action@v1
id: check
with:
mode: check
# deploy the tools to the ToolSheds (first to the Test ToolShed for testing)
deploy:
name: Deploy
needs: [setup, lint, flake8, lintr, combine_outputs]
if: ${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' ) && github.repository_owner == 'galaxy-genome-annotation' }}
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.7']
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Cache .cache/pip
uses: actions/cache@v3
id: cache-pip
with:
path: ~/.cache/pip
key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
- name: Deploy on testtoolshed
uses: galaxyproject/planemo-ci-action@v1
with:
mode: deploy
repository-list: ${{ needs.setup.outputs.repository-list }}
shed-target: testtoolshed
shed-key: ${{ secrets.TTS_API_KEY }}
continue-on-error: true
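# A failed Test ToolShed upload does not block the main ToolShed
# deployment below.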
- name: Deploy on toolshed
uses: galaxyproject/planemo-ci-action@v1
with:
mode: deploy
repository-list: ${{ needs.setup.outputs.repository-list }}
shed-target: toolshed
shed-key: ${{ secrets.TS_API_KEY }}
determine-success:
name: Check workflow success
needs: [setup, lint, flake8, lintr, file_sizes, combine_outputs]
if: ${{ always() && github.ref != 'refs/heads/master' && github.ref != 'refs/heads/main' }}
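# always() makes this job run even when upstream jobs failed or were
# skipped; each step below fails the job if the corresponding upstream
# job ran and did not succeed.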
runs-on: ubuntu-latest
steps:
- name: Check tool lint status
if: ${{ needs.lint.result != 'success' && needs.lint.result != 'skipped' }}
run: exit 1
- name: Indicate Python script lint status
if: ${{ needs.flake8.result != 'success' && needs.flake8.result != 'skipped' }}
run: exit 1
- name: Indicate R script lint status
if: ${{ needs.lintr.result != 'success' && needs.lintr.result != 'skipped' }}
run: exit 1
- name: Indicate file size check status
if: ${{ needs.file_sizes.result != 'success' && needs.file_sizes.result != 'skipped' }}
run: exit 1
- name: Check tool test status
if: ${{ needs.combine_outputs.result != 'success' && needs.combine_outputs.result != 'skipped' }}
run: exit 1