A/B test jobs (#314)
crusaderky authored Sep 9, 2022
1 parent d748681 commit a08904c
Showing 13 changed files with 595 additions and 10 deletions.
367 changes: 367 additions & 0 deletions .github/workflows/ab_tests.yml
@@ -0,0 +1,367 @@
name: A/B Tests
on:
push:
branches-ignore:
- main

concurrency:
# Include `github.event_name` to avoid pushes to `main` and
# scheduled jobs canceling one another
group: ab_tests-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true

defaults:
  # Required shell entrypoint so that conda environments are activated properly
run:
shell: bash -l {0}

jobs:
discover_ab_envs:
name: Discover A/B environments
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@v4
with:
python-version: '3.10'
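      # ci/scripts/discover_ab_environments.py prints a JSON matrix of A/B
      # environment names; every later job consumes it via fromJson().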
- id: set-matrix
run: echo "::set-output name=matrix::$(python ci/scripts/discover_ab_environments.py)"
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}

# Everything below this point runs iff there are files matching
# AB_environments/AB_*.conda.yaml
# AB_environments/AB_*.dask.yaml

software:
name: Setup
runs-on: ubuntu-latest
needs: discover_ab_envs
if: ${{ fromJson(needs.discover_ab_envs.outputs.matrix) }}
strategy:
fail-fast: false
matrix:
python-version: ["3.9"]
runtime-version: ${{ fromJson(needs.discover_ab_envs.outputs.matrix) }}

steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0

- name: Set up environment
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
use-mamba: true
condarc-file: ci/condarc
python-version: ${{ matrix.python-version }}
environment-file: ci/environment.yml

- name: Build Coiled Software Environment
env:
DASK_COILED__TOKEN: ${{ secrets.COILED_BENCHMARK_BOT_TOKEN }}
run: |
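          # Build a unique, lowercase software environment name from the event
          # type, branch/tag, run id, A/B environment name and Python version.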
export PYTHON_VERSION_FORMATTED=$(echo "${{ matrix.python-version }}" | sed 's/\.//g' )
export REF_NAME_FORMATTED=$(echo "$GITHUB_REF_NAME" | sed 's/\./-/g' )
export COILED_SOFTWARE_NAME_HEAD=dask-engineering/coiled-runtime-${{ github.event_name }}
export COILED_SOFTWARE_NAME_TAIL=$GITHUB_RUN_ID-$(echo ${{ matrix.runtime-version }} | tr 'A-Z' 'a-z')-py$PYTHON_VERSION_FORMATTED
if [[ ${{ github.event_name }} = 'pull_request' ]]
then
export COILED_SOFTWARE_NAME=$COILED_SOFTWARE_NAME_HEAD-${{ github.event.number }}-$COILED_SOFTWARE_NAME_TAIL
else
export COILED_SOFTWARE_NAME=$COILED_SOFTWARE_NAME_HEAD-$GITHUB_REF_TYPE-$REF_NAME_FORMATTED-$COILED_SOFTWARE_NAME_TAIL
fi
cp AB_environments/${{ matrix.runtime-version }}.conda.yaml coiled_software_environment.yaml
export COILED_SOFTWARE_ENV=$(python ci/dask_config_to_env.py AB_environments/${{ matrix.runtime-version }}.dask.yaml)
export ENV_FILE=coiled_software_environment.yaml
cat $ENV_FILE
mamba install coiled
echo "Creating Coiled software environment for $COILED_SOFTWARE_NAME"
echo "Environment parameters: $COILED_SOFTWARE_ENV"
coiled env create --name $COILED_SOFTWARE_NAME --conda $ENV_FILE $COILED_SOFTWARE_ENV
# Put COILED_SOFTWARE_NAME into a file so it can be downloaded in subsequent workflow jobs
echo $COILED_SOFTWARE_NAME > software_name.txt
# Dummy for compatibility with tests.yml
echo false > test_upstream.txt
- name: Upload environment file
uses: actions/upload-artifact@v3
with:
name: software-environment-${{ matrix.runtime-version }}-py${{ matrix.python-version }}
path: |
coiled_software_environment.yaml
software_name.txt
test_upstream.txt
runtime:
name: Runtime - ${{ matrix.os }}, Python ${{ matrix.python-version }}, Runtime ${{ matrix.runtime-version }}
needs: [discover_ab_envs, software]
runs-on: ${{ matrix.os }}
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ["3.9"]
runtime-version: ${{ fromJson(needs.discover_ab_envs.outputs.matrix) }}

steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0

- name: Set up environment
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
use-mamba: true
condarc-file: ci/condarc
python-version: ${{ matrix.python-version }}
environment-file: ci/environment.yml

- name: Download software environment assets
if: matrix.runtime-version == 'latest' || startsWith(matrix.runtime-version, 'AB_')
uses: actions/download-artifact@v3
with:
name: software-environment-${{ matrix.runtime-version }}-py${{ matrix.python-version }}

- name: Install coiled-runtime
env:
COILED_RUNTIME_VERSION: ${{ matrix.runtime-version }}
run: source ci/scripts/install_coiled_runtime.sh

- name: Run Coiled Runtime Tests
id: test
env:
DASK_COILED__TOKEN: ${{ secrets.COILED_BENCHMARK_BOT_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.RUNTIME_CI_BOT_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.RUNTIME_CI_BOT_AWS_SECRET_ACCESS_KEY }}
COILED_RUNTIME_VERSION: ${{ matrix.runtime-version }}
DB_NAME: runtime-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}.db
BENCHMARK: true
run: bash ci/scripts/run_tests.sh tests/runtime

- name: Upload benchmark results
uses: actions/upload-artifact@v3
if: always()
with:
name: runtime-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}
path: runtime-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}.db

benchmarks:
name: Benchmarks - ${{ matrix.os }}, Python ${{ matrix.python-version }}, Runtime ${{ matrix.runtime-version }}
needs: [discover_ab_envs, software]
runs-on: ${{ matrix.os }}
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ["3.9"]
runtime-version: ${{ fromJson(needs.discover_ab_envs.outputs.matrix) }}

steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0

- name: Set up environment
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
use-mamba: true
condarc-file: ci/condarc
python-version: ${{ matrix.python-version }}
environment-file: ci/environment.yml

- name: Download software environment assets
uses: actions/download-artifact@v3
with:
name: software-environment-${{ matrix.runtime-version }}-py${{ matrix.python-version }}

- name: Install coiled-runtime
env:
COILED_RUNTIME_VERSION: ${{ matrix.runtime-version }}
run: source ci/scripts/install_coiled_runtime.sh

- name: Run benchmarking tests
id: benchmarking_tests
env:
DASK_COILED__TOKEN: ${{ secrets.COILED_BENCHMARK_BOT_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.RUNTIME_CI_BOT_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.RUNTIME_CI_BOT_AWS_SECRET_ACCESS_KEY }}
COILED_RUNTIME_VERSION: ${{ matrix.runtime-version }}
DB_NAME: benchmark-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}.db
BENCHMARK: true
run: bash ci/scripts/run_tests.sh tests/benchmarks

- name: Upload benchmark results
uses: actions/upload-artifact@v3
if: always()
with:
name: benchmark-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}
path: benchmark-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}.db

stability:
name: Stability - ${{ matrix.os }}, Python ${{ matrix.python-version }}, Runtime ${{ matrix.runtime-version }}
needs: [discover_ab_envs, software]
runs-on: ${{ matrix.os }}
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ["3.9"]
runtime-version: ${{ fromJson(needs.discover_ab_envs.outputs.matrix) }}

steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0

- name: Set up environment
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
use-mamba: true
condarc-file: ci/condarc
python-version: ${{ matrix.python-version }}
environment-file: ci/environment.yml

- name: Download software environment assets
if: matrix.runtime-version == 'latest' || startsWith(matrix.runtime-version, 'AB_')
uses: actions/download-artifact@v3
with:
name: software-environment-${{ matrix.runtime-version }}-py${{ matrix.python-version }}

- name: Install coiled-runtime
env:
COILED_RUNTIME_VERSION: ${{ matrix.runtime-version }}
run: source ci/scripts/install_coiled_runtime.sh

- name: Run stability tests
id: stability_tests
env:
DASK_COILED__TOKEN: ${{ secrets.COILED_BENCHMARK_BOT_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.RUNTIME_CI_BOT_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.RUNTIME_CI_BOT_AWS_SECRET_ACCESS_KEY }}
COILED_RUNTIME_VERSION: ${{ matrix.runtime-version }}
DB_NAME: stability-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}.db
BENCHMARK: true
CLUSTER_DUMP: true
run: bash ci/scripts/run_tests.sh tests/stability

- name: Upload benchmark results
uses: actions/upload-artifact@v3
if: always()
with:
name: stability-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}
path: stability-${{ matrix.os }}-${{ matrix.runtime-version }}-py${{ matrix.python-version }}.db

cleanup:
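    # Tear down the per-run Coiled software environments created by the 'software' job.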
needs: [discover_ab_envs, software, runtime, benchmarks, stability]
    if: always() && fromJson(needs.discover_ab_envs.outputs.matrix)
name: Cleanup
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.9"]
runtime-version: ${{ fromJson(needs.discover_ab_envs.outputs.matrix) }}

steps:
- uses: actions/checkout@v2

- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}

- name: Install coiled
run: python -m pip install coiled

- name: Download software environment assets
uses: actions/download-artifact@v3
with:
name: software-environment-${{ matrix.runtime-version }}-py${{ matrix.python-version }}

- name: Remove Coiled software environment
env:
DASK_COILED__TOKEN: ${{ secrets.COILED_BENCHMARK_BOT_TOKEN }}
run: |
export SOFTWARE_NAME=$(cat software_name.txt)
echo "Deleting $SOFTWARE_NAME"
coiled env delete $SOFTWARE_NAME
process-results:
needs: [discover_ab_envs, runtime, benchmarks, stability]
name: Combine separate benchmark results
    if: always() && fromJson(needs.discover_ab_envs.outputs.matrix)
runs-on: ubuntu-latest
concurrency:
# Fairly strict concurrency rule to avoid stepping on benchmark db.
# Could eventually replace with a real db in coiled, RDS, or litestream
group: process-benchmarks
cancel-in-progress: false
steps:
- uses: actions/checkout@v2

- uses: actions/setup-python@v4

- name: Install dependencies
run: pip install alembic

- uses: actions/download-artifact@v3
with:
path: benchmarks

- name: Combine benchmarks
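        # Merge the per-job .db artifacts downloaded above into a single benchmark.db
        # (see ci/scripts/combine-dbs.sh).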
run: |
ls -lhR benchmarks
bash ci/scripts/combine-dbs.sh
- name: Upload benchmark results as artifact
uses: actions/upload-artifact@v3
with:
name: benchmark.db
path: benchmark.db

static-site:
needs: [discover_ab_envs, process-results]
    # Always generate the site: without always() this job would be skipped
    # whenever an indirect dependency (e.g. a test run) failed.
    if: always() && fromJson(needs.discover_ab_envs.outputs.matrix)
name: Build static dashboards
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0

- uses: actions/download-artifact@v3
with:
name: benchmark.db

- name: Set up environment
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
use-mamba: true
python-version: "3.9"
environment-file: ci/environment-dashboard.yml

- name: Generate dashboards
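        # '-b AB_baseline' presumably marks AB_baseline as the baseline environment
        # for the A/B comparison dashboards.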
run: python dashboard.py -d benchmark.db -o static -b AB_baseline

- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: static-dashboard
path: static
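
The discovery script invoked by the discover_ab_envs job is not part of this hunk. As a rough illustration only (the real ci/scripts/discover_ab_environments.py may differ), a script along these lines would produce the JSON matrix that the fromJson() expressions above consume:

    import glob
    import json
    import os

    # Collect A/B environment names such as ["AB_baseline", "AB_candidate"];
    # per the comment in the workflow, the downstream jobs only matter when
    # AB_environments/AB_*.conda.yaml files exist.
    names = sorted(
        os.path.basename(path)[: -len(".conda.yaml")]
        for path in glob.glob("AB_environments/AB_*.conda.yaml")
    )
    print(json.dumps(names))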
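
Similarly, ci/dask_config_to_env.py (used when building the Coiled software environment) is not shown here. A hedged sketch, assuming it flattens an AB_*.dask.yaml config into Dask's standard DASK_* environment-variable names (nested keys joined with '__', upper-cased, '-' replaced by '_'); the exact arguments it emits for `coiled env create` are an assumption:

    import sys
    import yaml  # PyYAML, assumed available via ci/environment.yml

    def flatten(prefix, node):
        # Yield NAME=value pairs for every leaf of a nested Dask config.
        if isinstance(node, dict):
            for key, value in node.items():
                yield from flatten(prefix + [key], value)
        else:
            name = "DASK_" + "__".join(prefix).upper().replace("-", "_")
            yield f"{name}={node}"

    with open(sys.argv[1]) as fh:
        config = yaml.safe_load(fh) or {}

    # Emit everything on one line so the workflow can splice it into the
    # `coiled env create` command line; the real flag syntax is assumed.
    print(" ".join(flatten([], config)))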