name: ARTKIT Release Pipeline
on:
push:
# The push trigger will cause the workflow to run when:
# - Commits are pushed directly to the 1.0.x branch or any release/* branch.
# - A pull request is merged into the 1.0.x branch or any release/* branch.
# - Changes are pushed to the 1.0.x or release/* branches through rebases or
# fast-forwards.
#
# The GitHub actions variable for the branch being pushed to is `github.ref`;
# its value is formatted as `refs/heads/<branch-name>`.
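# Example (illustrative branch name): a push to release/1.0.1 yields
# github.ref == 'refs/heads/release/1.0.1'.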
branches:
- 1.0.x
- release/*
pull_request:
# The workflow will run when:
# - A pull request targeting the 1.0.x branch or any branch matching release/* is
# created.
# - Commits are pushed to the source branch of an open pull request targeting these
# branches.
# - An existing pull request targeting these branches is reopened or synchronized.
#
# The GitHub actions variable for the branch from which the pull request originates
# is `github.head_ref`.
# The GitHub actions variable for the branch being targeted by the pull request is
# `github.base_ref`.
# The values of these variables are the branch names without any prefixes.
#
# For pull_request events, `github.ref` refers to the merge commit GitHub creates to
# simulate the result of merging the pull request into the target branch, and it is
# formatted as `refs/pull/<pull-request-number>/merge`.
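# Example (illustrative branch names): for a pull request from dev/1.0.1 into
# release/1.0.1, github.head_ref is 'dev/1.0.1', github.base_ref is 'release/1.0.1',
# and github.ref is 'refs/pull/<pull-request-number>/merge'.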
branches:
- 1.0.x
- release/*
schedule: # runs on default branch
- cron: "0 2 * * 1-5" # Every weekday at 2 AM
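# Note: the cron fields are minute, hour, day-of-month, month, day-of-week, evaluated in UTC.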
env:
project_name: artkit
package_name: artkit
jobs:
code_quality_checks:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
# We check out the result of the trigger action, i.e., either the merge commit
# from a pull request or the commit from a direct push to the target branch
ref: ${{ github.ref }}
# We fetch the last two commits so that the parent commit (HEAD^) is also available
fetch-depth: 2
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.10.14
- name: Upgrade pip
# Ensure we run the latest version of pip
run: python -m pip install --upgrade pip
- name: Run isort
# Check if the import order is correct
run: |
python -m pip install isort~=5.12
python -m isort --check --diff .
- name: Run black
# Check if the code is formatted correctly
run: |
python -m pip install black~=24.4.2
python -m black --check .
- name: Run flake8
# Check for code style issues
run: |
python -m pip install flake8~=5.0 flake8-comprehensions~=3.10
python -m flake8 --config tox.ini -v .
- name: Run mypy
# Check that type hints are complete and correct
run: |
python -m pip install mypy
# add package dependencies for mypy
dependencies=(
aiohttp
fluxus~=1.0
gamma-pytools~=3.0
openai
pandas-stubs
pytest
types-PyYAML
boto3
moto
boto3-stubs
)
pip install "${dependencies[@]}"
python -m mypy src --config-file pyproject.toml
detect_build_config_changes:
# We check if the push or PR contains changes in the build configuration, in which
# case we will run the full pipeline. The build configuration files are any of the
# following:
#
# - meta.yaml
# - pyproject.toml
# - release-pipeline.yml
# - tox.ini
# - make.py
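# Example (illustrative): a push that only edits documentation leaves the flag at 0,
# whereas a push that modifies pyproject.toml sets it to 1 and triggers the full
# conda/tox matrix.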
runs-on: ubuntu-latest
outputs:
# Export the output of the step as boolean variable `conda_build_config_changed`,
# where 1 indicates that the build configuration has changed and 0 indicates that
# it has not.
conda_build_config_changed: ${{ steps.diff.outputs.conda_build_config_changed }}
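# Downstream jobs consume this output in their `if:` conditions, e.g.
# needs.detect_build_config_changes.outputs.conda_build_config_changed == '1'.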
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Detect changes in build configuration
id: diff
run: |
set -eux
files_changed=$(git diff HEAD^ --name-only)
echo "Files changed since last commit: ${files_changed}"
n_files_changed=$(echo "${files_changed}" | grep -i -E 'meta\.yaml|pyproject\.toml|release-pipeline\.yml|tox\.ini|make\.py' | wc -l | xargs || true)
if [ ${n_files_changed} -gt 0 ]; then
build_changed=1
echo "build config has been changed"
else
build_changed=0
echo "build config is unchanged"
fi
echo "::set-output name=conda_build_config_changed::$build_changed"
unit_tests:
# Run unit tests on the codebase, in developer mode
runs-on: ubuntu-latest
# We only run the unit tests once the code quality checks have passed
needs: code_quality_checks
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python 3.10.14
uses: actions/setup-python@v5
with:
python-version: 3.10.14
- name: Display directory contents
run: ls ${{ github.workspace }}
- name: Set up micromamba environment and run pytest
run: |
set -eux
# install micromamba
curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
export MAMBA_ROOT_PREFIX=~/micromamba
eval "$(./bin/micromamba shell hook -s posix)"
# install the develop environment
micromamba env create --yes --file environment.yml
micromamba activate ${{ env.project_name }}
export PYTHONPATH=${{ github.workspace }}/src/
export RUN_PACKAGE_VERSION_TEST=${{ env.project_name }}
pytest \
--cov ${{ env.project_name }} \
--cov-config "tox.ini" \
--cov-report=xml:coverage.xml --cov-report=html:htmlcov \
--junitxml pytest.xml \
. -s
- name: Publish test results
if: always()
uses: actions/upload-artifact@v4
with:
name: pytest-results
path: pytest.xml
- name: Publish code coverage results
if: always()
uses: actions/upload-artifact@v4
with:
name: code-coverage-results
path: coverage.xml
conda_tox_essential:
# The "essential" conda/tox test is a matrix test stripped down to a minimal set of
# configurations. It is run on pull requests that are not scheduled runs, do not
# involve changes to the build configuration, and are not PRs preparing or executing
# a release.
#
# The actual build for conda or PyPi is carried out in the make.py script, which
# will invoke tox or conda-build depending on the `build-system` parameter.
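# Note: with the matrix and excludes below, this resolves to three runs:
# Python 3.10.14 with tox/min, Python 3.12 with conda/max, and Python 3.12 with tox/max.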
runs-on: ubuntu-latest
needs:
- code_quality_checks
- detect_build_config_changes
- check_release
# IF (no build config changes) AND (not a scheduled run) AND (not a PR from a dev/ branch) AND (not a PR targeting a release/ branch) AND (not a push into release/)
if: ${{ needs.detect_build_config_changes.outputs.conda_build_config_changed != '1' && github.event_name != 'schedule' && !startsWith(github.head_ref, 'dev/') && !startsWith(github.base_ref, 'release/') && !startsWith(github.ref, 'refs/heads/release/') }}
strategy:
matrix:
python-version: [ 3.10.14, 3.12 ]
build-system: [ conda, tox ]
pkg-dependencies: [ max, min ]
exclude:
- build-system: conda
pkg-dependencies: min
- python-version: 3.12
pkg-dependencies: min
- python-version: 3.10.14
pkg-dependencies: max
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Display directory contents
run: ls ${{ github.workspace }}
- name: Install and configure micromamba
if: matrix.build-system == 'conda'
run: |
curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
export MAMBA_ROOT_PREFIX=~/micromamba
eval "$(./bin/micromamba shell hook -s posix)"
micromamba create -n build -y
micromamba activate build
micromamba install -y -c conda-forge boa~=0.14 toml~=0.10 flit~=3.6 packaging~=20.9
- name: Install dependencies using tox
if: matrix.build-system == 'tox'
run: |
python -m pip install "toml~=0.10"
python -m pip install "flit~=3.7"
python -m pip install "tox~=3.25"
- name: Build and test
run: |
set -eux
if [ "${{ matrix.build-system }}" = "conda" ]; then
export MAMBA_ROOT_PREFIX=~/micromamba
eval "$(./bin/micromamba shell hook -s posix)"
micromamba activate build
fi
echo "DIRNAME_WORKSPACE=$(dirname "${{ github.workspace }}")" >> $GITHUB_ENV
export RUN_PACKAGE_VERSION_TEST=${{ env.project_name }}
cd ${{ github.workspace }}
./make.py ${{ env.project_name }} ${{ matrix.build-system }} ${{ matrix.pkg-dependencies }}
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: "${{ matrix.build-system }}_${{ matrix.pkg-dependencies }}"
path: ${{ env.DIRNAME_WORKSPACE }}/dist
conda_tox_matrix:
# The full conda/tox test is a matrix test that runs on scheduled runs, when the
# build configuration has changed, or for PRs from dev/ to release/ or pushes into
# the release/ branch (triggering a release).
#
# The matrix tests comprise six test runs, three for conda and three for tox. Both
# conda and tox are run with the default, minimum, and maximum package dependencies
# to ensure compatibility across the range of supported versions of 3rd party
# libraries.
#
# The actual build for conda or PyPI is carried out in the make.py script, which
# will invoke tox or conda-build depending on the `build-system` parameter.
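# Note: with the matrix and excludes below, the six runs are Python 3.10.14 with the
# default and min dependencies and Python 3.12 with the max dependencies, each built
# with both conda and tox.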
runs-on: ubuntu-latest
needs:
- detect_build_config_changes
- code_quality_checks
- check_release
# IF (build config changed) OR (PR from dev/ to release/) OR (push into release/) OR (scheduled run)
if: needs.detect_build_config_changes.outputs.conda_build_config_changed == '1' || (startsWith(github.head_ref, 'dev/') && startsWith(github.base_ref, 'release/')) || startsWith(github.ref, 'refs/heads/release/') || github.event_name == 'schedule'
strategy:
matrix:
python-version: [ 3.10.14, 3.12 ]
build-system: [ conda, tox ]
pkg-dependencies: [ default, min, max ]
exclude:
- python-version: 3.10.14
pkg-dependencies: max
- python-version: 3.12
pkg-dependencies: default
- python-version: 3.12
pkg-dependencies: min
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Display directory contents
run: ls ${{ github.workspace }}
- name: Install and configure micromamba
if: matrix.build-system == 'conda'
run: |
set -eux
# install micromamba
curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
export MAMBA_ROOT_PREFIX=~/micromamba
eval "$(./bin/micromamba shell hook -s posix)"
micromamba create -n build -y
micromamba activate build
micromamba install -y -c conda-forge boa~=0.14 toml~=0.10 flit~=3.6 packaging~=20.9
- name: Install dependencies using tox
if: matrix.build-system == 'tox'
run: |
python -m pip install "toml~=0.10"
python -m pip install "flit~=3.7"
python -m pip install "tox~=3.25"
- name: Build and test
run: |
set -eux
if [ "${{ matrix.build-system }}" = "conda" ]; then
export MAMBA_ROOT_PREFIX=~/micromamba
eval "$(./bin/micromamba shell hook -s posix)"
micromamba activate build
fi
echo "DIRNAME_WORKSPACE=$(dirname "${{ github.workspace }}")" >> $GITHUB_ENV
export RUN_PACKAGE_VERSION_TEST=${{ env.project_name }}
cd ${{ github.workspace }}
./make.py ${{ env.project_name }} ${{ matrix.build-system }} ${{ matrix.pkg-dependencies }}
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: "${{ matrix.build-system }}_${{ matrix.pkg-dependencies }}"
path: ${{ env.DIRNAME_WORKSPACE }}/dist
veracode_check:
# Run Veracode static code analysis on the codebase.
# We only do this once the code quality checks have passed. The scan currently runs
# only on scheduled runs; the release-related trigger below is commented out.
runs-on: ubuntu-latest
needs:
- code_quality_checks
# IF (PR from dev/ to release/) OR (push into release/) OR (scheduled run)
# if: startsWith(github.head_ref, 'dev/') || startsWith(github.ref, 'refs/heads/release/') || github.event_name == 'schedule'
if: github.event_name == 'schedule'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Prepare archive for Veracode
shell: bash
run: |
set -eux
eval "$(conda shell.bash hook)"
cd ${{ github.workspace }}
mkdir static_scan
git archive --format=zip --output static_scan/archive.zip HEAD
- name: Run Veracode Scan
id: upload_and_scan
uses: veracode/veracode-uploadandscan-action@master
with:
appname: 'artkit'
version: '3.1.${{ github.run_number }}'
filepath: 'static_scan/archive.zip'
vid: '${{ secrets.VERACODE_API_ID }}'
vkey: '${{ secrets.VERACODE_API_KEY }}'
createprofile: false
scanpollinginterval: '60'
check_release:
# Branch names include version information. The main branches are named
# `<major>.<minor>.x`, while release branches are named `dev/<major>.<minor>.<patch>`
# or `release/<major>.<minor>.<patch>`.
#
# We check that the package version matches the branch name for versioned branches.
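# Example (illustrative version): on branch release/1.0.1 or dev/1.0.1, the package
# version must be exactly 1.0.1, otherwise the version consistency check below fails.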
runs-on: ubuntu-latest
# IF (creating/syncing PR merging dev/ into release/) OR (push into release/) OR (PR into *.*.x)
if: (startsWith(github.head_ref, 'dev/') && startsWith(github.base_ref, 'release/')) || startsWith(github.ref, 'refs/heads/release/') || endsWith(github.base_ref, '.x')
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.10.14
- name: Checkout artkit
uses: actions/checkout@v4
with:
path: artkit
- name: Set env vars
run: |
pip install packaging~=20.9
export PYTHONPATH=${{ github.workspace }}/artkit/sphinx/make
package_path=${{ github.workspace }}/${{ env.project_name }}/src/${{ env.project_name }}
version=$(python -c "import make_util; print(make_util.get_package_version(package_path='$package_path'))")
echo "current_version=$version" >> $GITHUB_ENV
if [ ${{ github.event_name }} == 'pull_request' ]; then
echo "BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
else
ref=${{ github.ref }}
branch_name=${ref/refs\/heads\//}
echo "BRANCH_NAME=$branch_name" >> $GITHUB_ENV
fi
- name: Check version consistency
# IF (PR from dev/ to release/) OR (push into release/)
if: (startsWith(github.head_ref, 'dev/') && startsWith(github.base_ref, 'release/')) || startsWith(github.ref, 'refs/heads/release/')
run: |
set -eux
python -m pip install toml~=0.10.2 packaging~=20.9
cd ${{ github.workspace }}/artkit
python <<EOF
from make import ToxBuilder
branch = "${{ env.BRANCH_NAME }}"
print(f"Checking package version consistency with branch: {branch}")
assert (
branch.startswith("release/") or branch.startswith("dev/")
), "This check should only run on versioned branches – check pipeline."
branch_version = branch.split("/", maxsplit=1)[1]
package_version = ToxBuilder("${{ env.project_name }}", "default").package_version
assert (
package_version == branch_version
), f"Package version '{package_version}' does not match '{branch_version}' from branch."
print("Check passed.")
EOF
release:
# The release job is responsible for publishing the package to PyPI, Anaconda, and
# GitHub releases. It is triggered by a push to the release branch or a PR targeting
# the release branch.
#
# In the case of a PR, the job will prepare the release. The actual release will
# only be executed upon a push to the release branch.
runs-on: ubuntu-latest
# IF (PR from dev/ to release/) OR (push into release/)
if: (startsWith(github.head_ref, 'dev/') && startsWith(github.base_ref, 'release/')) || startsWith(github.ref, 'refs/heads/release/')
needs:
- check_release
- conda_tox_matrix
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.10.14
- name: Checkout artkit
uses: actions/checkout@v4
with:
path: artkit
- name: Get package version
run: |
set -eux
echo "Getting version"
pip install packaging~=20.9
export PYTHONPATH=${{ github.workspace }}/artkit/sphinx/make
package_path=${{ github.workspace }}/${{ env.project_name }}/src/${{ env.project_name }}
version=$(python -c "import make_util; print(make_util.get_package_version(package_path='$package_path'))")
echo "current_version=$version" >> $GITHUB_ENV
echo "Detecting pre-release ('dev' or 'rc' in version)"
prerelease=False
[[ $version == *dev* ]] && prerelease=True && echo "Development release identified"
[[ $version == *rc* ]] && prerelease=True && echo "Pre-release identified"
echo "is_prerelease=$prerelease" >> $GITHUB_ENV
echo "DIRNAME_WORKSPACE=$(dirname "${{ github.workspace }}")" >> $GITHUB_ENV
- name: Download Tox Build Artifact
uses: actions/download-artifact@v4
with:
name: tox_default
path: ${{ env.DIRNAME_WORKSPACE }}/dist
- name: Download Conda Build Artifact
uses: actions/download-artifact@v4
with:
name: conda_default
path: ${{ env.DIRNAME_WORKSPACE }}/dist
- name: Publish to PyPi (Tox)
# Publish the package to PyPi using flit.
# Do this only upon a push to the release branch.
if: startsWith(github.ref, 'refs/heads/release/')
env:
FLIT_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
FLIT_USERNAME: __token__
run: |
set -eux
cd ${{ github.workspace }}/${{ env.project_name }}
pip install flit
flit install
flit publish
echo "pypi_published=True" >> $GITHUB_ENV
- name: Publish to Anaconda (Conda)
# Publish the package to Anaconda using anaconda-client.
# Do this only upon a push to the release branch.
if: startsWith(github.ref, 'refs/heads/release/')
env:
CONDA_TOKEN: ${{ secrets.CONDA_TOKEN }}
run: |
set -eux
cd ${{ env.DIRNAME_WORKSPACE }}
eval "$(conda shell.bash hook)"
conda install -y anaconda-client
anaconda -t ${CONDA_TOKEN} upload --user bcgx --force ${{ env.DIRNAME_WORKSPACE }}/dist/conda/noarch/${{ env.package_name }}-*.tar.bz2
anaconda logout
echo "conda_published=True" >> $GITHUB_ENV
- name: GitHub Release
# Create a GitHub release with the package artifacts.
# Do this only upon a push to the release branch.
if: startsWith(github.ref, 'refs/heads/release/')
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ env.current_version }}
files: |
${{ github.workspace }}/tox_default/tox/${{ env.package_name }}-*.tar.gz
${{ env.DIRNAME_WORKSPACE }}/dist/conda/noarch/${{ env.package_name }}-*.tar.bz2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPOSITORY: ${{ github.repository }}
release_name: "${{ env.project_name }} ${{ env.current_version }}"
body: |
This is the ${{ env.current_version }} release of ${{ env.package_name }}.
You can upgrade your current pip installation via
pip install --upgrade ${{ env.package_name }}
Your conda package can be upgraded by running
conda install -c conda-forge -c bcgx ${{ env.package_name }}
draft: true
prerelease: ${{ env.is_prerelease }}
docs:
# The docs job is responsible for building and publishing the documentation to the
# GitHub Pages branch. It is triggered by a push to the release branch or a PR
# targeting the release branch.
#
# In the case of a PR, the job will only prepare the documentation.
# The actual publication will only be executed upon a push to the release branch.
runs-on: ubuntu-latest
# IF (PR from dev/ to release/) OR (push into release/)
if: (startsWith(github.head_ref, 'dev/') && startsWith(github.base_ref, 'release/')) || startsWith(github.ref, 'refs/heads/release/')
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.10.14
- name: Retrieve current documentation versions from github-pages
run: |
set -eux
if git show-ref --verify --quiet refs/heads/github-pages; then
echo "Checking out github-pages"
git fetch origin github-pages --depth=1
git checkout --track origin/github-pages
else
echo "Branch github-pages does not exist. Creating and checking out..."
git checkout -b github-pages
fi
# make sure we have a docs directory
mkdir -p docs/docs-version
echo "Current documentation contents:"
ls docs/docs-version
# create staging area
mkdir -p ${{ github.workspace }}/staging
# copy the current documentation versions to the staging area
cp -r docs/docs-version ${{ github.workspace }}/staging/docs-version.bak
- name: Build latest documentation
run: |
set -eux
if [ "${{ github.event_name }}" == 'pull_request' ]; then
BRANCH_NAME=${{ github.head_ref }}
else
BRANCH_NAME=${{ github.ref_name }}
fi
echo "Checking out $BRANCH_NAME"
git fetch origin $BRANCH_NAME --depth=1
git checkout $BRANCH_NAME
# install micromamba
curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
export MAMBA_ROOT_PREFIX=~/micromamba
eval "$(./bin/micromamba shell hook -s posix)"
# install the docs environment
micromamba env create -n docs --yes --file environment.yml
micromamba activate docs
export PYTHONPATH=${{ github.workspace }}/src/
python sphinx/make.py html
- name: Merge previous and latest docs
# Merge the previous and latest documentation versions and prepare for
# deployment. The merged documentation is stored in the staging area.
run: |
set -eux
export MAMBA_ROOT_PREFIX=~/micromamba
eval "$(./bin/micromamba shell hook -s posix)"
# install the tree utility
sudo apt-get install -y tree
echo "Restoring previous documentation to the docs directory"
pwd # /home/runner/work/artkit/artkit
mkdir -p docs
mv ${{ github.workspace }}/staging/docs-version.bak docs/docs-version
ls docs/docs-version
mkdir -p ${{ github.workspace }}/sphinx/build/
micromamba activate docs
python sphinx/make.py prepare_docs_deployment
echo "Current docs contents:"
tree docs
mv ${{ github.workspace }}/docs staging/docs
- name: Archive docs
uses: actions/upload-artifact@v4
with:
name: ${{ env.project_name }}_docs
path: ${{ github.workspace }}/staging/docs
- name: Publish docs to branch github-pages
# Publish the documentation to the GitHub Pages branch.
# Do this only upon a push to the release branch.
if: startsWith(github.ref, 'refs/heads/release/')
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
current_version: ${{ env.current_version }}
run: |
set -eux
echo "Adjusting git credentials"
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
git checkout github-pages
echo "Exporting version as env var"
pip install packaging~=20.9
export PYTHONPATH=${{ github.workspace }}/sphinx/make
package_path="${{ github.workspace }}/src/${{ env.project_name }}"
version=$(python -c "import make_util; print(make_util.get_package_version(package_path='$package_path'))")
rm -rf docs
mv staging/docs .
git add docs
git status
git commit -m "Publish GitHub Pages [skip ci]"
git tag -a -m "Release version $version" $version
git push -f --set-upstream origin github-pages