# Publishing workflow (#3675)
# Triggered either manually (workflow_dispatch) or automatically once a
# "Deployments" workflow run completes.
on:
  workflow_dispatch:
    inputs:
      deployment_id:
        type: string
        description: The workflow id of the deployment that triggered the release creation.
        required: true
      assets_from_run:
        type: string
        description: Optional argument to take assets from a prior run of this workflow; facilitates rerunning a failed workflow without re-building the assets.
        required: false
      manual_assets_creation:
        type: string
        required: false
        description: Do not trigger a pipeline on GitLab but instead use the assets contained in the draft release with the given name.
      github_commit:
        type: string
        description: Optional argument to set the GitHub commit to use for the final build and validation.
        required: false
      nvidia_mgpu_commit:
        type: string
        description: Optional argument to set the GitLab commit to use for the nvidia-mgpu target.
        required: false
      include_docs:
        type: boolean
        description: Include the generated documentation in the docker image(s).
        default: false
  workflow_run:
    workflows:
      - Deployments
    types:
      - completed

name: Publishing
jobs:
  # Collects the build info produced by the deployment run, triggers (or reuses)
  # the per-asset draft releases, waits for the assets to appear, and re-uploads
  # them as a workflow artifact for the downstream jobs.
  assets:
    name: Assets
    if: github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success'
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
    environment:
      name: ghcr-deployment
      url: ${{ vars.deployment_url }}
    outputs:
      artifacts_url: ${{ steps.artifacts.outputs.artifacts_url }}
      retrieved_assets: ${{ steps.assets_retrieval.outputs.artifact_name }}
      github_commit: ${{ steps.artifacts.outputs.github_commit }}
      docker_images: ${{ steps.artifacts.outputs.docker_images }}
      python_wheels: ${{ steps.artifacts.outputs.python_wheels }}
      installers: ${{ steps.artifacts.outputs.installers }}
      releases: ${{ steps.artifacts.outputs.releases }}
      release_title: ${{ steps.artifacts.outputs.release_title }}
      release_version: ${{ steps.artifacts.outputs.release_version }}
      cudaq_version: ${{ steps.artifacts.outputs.cudaq_version }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          repository: ${{ vars.assets_repo || github.repository }}
          ref: ${{ vars.data_branch }}
          token: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}

      - name: Download build info
        id: artifacts
        run: |
          if ${{ inputs.deployment_id != '' }}; then
            artifacts_url=https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ inputs.deployment_id }}/artifacts
          else
            artifacts_url=${{ github.event.workflow_run.artifacts_url }}
            echo "Artifacts downloaded from https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}" >> $GITHUB_STEP_SUMMARY
          fi
          artifacts=$(gh api $artifacts_url --paginate -q '.artifacts[] | {name: .name, url: .archive_download_url}')
          # Downloads artifact $1 and concatenates all contained files into $2.txt.
          function download {
            gh api $1 > info.zip
            unzip -d info info.zip
            for file in `find info/ -type f`; do
              cat "$file" >> $2.txt
            done
            rm -rf info info.zip
          }
          docker_images="{\"info_files\":[]}"
          python_wheels="{\"info_files\":[]}"
          installers="{\"info_files\":[]}"
          for artifact in `echo "$artifacts"`; do
            name=`echo $artifact | jq -r '.name'`
            url=`echo $artifact | jq -r '.url'`
            if [ "${name%_publishing}" != "$name" ]; then
              download "$url" "$name"
              platforms=`cat "$name.txt" | egrep -o 'platforms?: \S*' | cut -d ' ' -f 2`
              cuda_version=`cat "$name.txt" | egrep -o 'cuda-version: \S*' | cut -d ' ' -f 2`
              image_hash=`cat "$name.txt" | grep -o -e "cuda-quantum-dev-image: \S*" -e "cuda-quantum-assets-image: \S*" -e "cuda-quantum-wheeldeps-image: \S*" | cut -d ' ' -f 2`
              if [ -n "$(echo $platforms | grep ',')" ]; then runner=linux-amd64-cpu8
              else runner=$(echo $platforms | tr / -)-cpu8
              fi
              # Make sure to push all staging commits to a separate branch
              # to ensure that a pipeline runs for each commit.
              staging_branch=bot/${{ inputs.assets_from_run || github.run_id }}/$name
              # Use a dedicated draft release for each set of assets to avoid issues
              # with race conditions when running artifacts builds concurrently.
              release_id=${{ inputs.assets_from_run || github.run_id }}_$name
              releases+="$release_id "
              info={\"$name\":{\"release_id\":\"$release_id\",\"staging_branch\":\"$staging_branch\",\"runner\":\"$runner\",\"artifacts_url\":\"$artifacts_url\",\"platform\":\"$platforms\",\"cuda_version\":\"$cuda_version\",\"image_hash\":\"$image_hash\"}}
              if [ "${name#image}" != "$name" ]; then
                docker_images=`echo $docker_images | jq ".info_files |= . + [\"$name\"]"`
                docker_images=`echo $docker_images | jq ". |= . + $info"`
              elif [ "${name#python}" != "$name" ]; then
                python_wheels=`echo $python_wheels | jq ".info_files |= . + [\"$name\"]"`
                python_wheels=`echo $python_wheels | jq ". |= . + $info"`
              elif [ "${name#installer}" != "$name" ]; then
                installers=`echo $installers | jq ".info_files |= . + [\"$name\"]"`
                installers=`echo $installers | jq ". |= . + $info"`
              fi
            elif [ "${name#deployment_info}" != "$name" ]; then
              download "$url" "$name"
              github_commit=`cat "$name.txt" | grep -o 'source-sha: \S*' | cut -d ' ' -f 2`
              release_title=`cat "$name.txt" | grep -o 'release-title: \S*' | cut -d ' ' -f 2`
              release_version=`cat "$name.txt" | grep -o 'release-version: \S*' | cut -d ' ' -f 2`
            elif [ "$name" == "cuda_quantum_docs" ] && ${{ github.event_name == 'workflow_dispatch' && inputs.include_docs }}; then
              docs_archive="$(pwd)/cuda_quantum_docs.zip"
              gh api $url > "$docs_archive"
            fi
          done
          # CUDA-Q version number
          is_versioned=${{ github.ref_type == 'tag' || startsWith(github.ref_name, 'releases/') || startsWith(github.ref_name, 'staging/') }}
          if ${is_versioned}; then
            cudaq_version=`echo ${{ github.ref_name }} | egrep -o "([0-9]{1,}\.)+[0-9]{1,}"`
          else
            cudaq_version=${release_version:-0.0.0}
          fi
          echo "release_title=$release_title" >> $GITHUB_OUTPUT
          echo "release_version=$release_version" >> $GITHUB_OUTPUT
          # NOTE(review): the value is deliberately wrapped in single quotes;
          # consumers strip them by echoing the expression (see "Wait for assets").
          echo "releases='$releases'" >> $GITHUB_OUTPUT
          echo "github_commit=$github_commit" >> $GITHUB_OUTPUT
          echo "docker_images=$(echo $docker_images)" >> $GITHUB_OUTPUT
          echo "python_wheels=$(echo $python_wheels)" >> $GITHUB_OUTPUT
          echo "installers=$(echo $installers)" >> $GITHUB_OUTPUT
          echo "artifacts_url=$artifacts_url" >> $GITHUB_OUTPUT
          echo "cudaq_version=$cudaq_version" >> $GITHUB_OUTPUT
          echo "docs_archive=$docs_archive" >> $GITHUB_OUTPUT
        env:
          GH_TOKEN: ${{ github.token }}

      - name: Trigger assets creation
        id: assets_creation
        if: inputs.assets_from_run == ''
        run: |
          # The commit title of the staging commit needs to match
          # the docker image that is used to build additional components.
          # Using the image sha as the file name and the docker image name
          # as the folder is convenient for the GitLab CI.
          if ${{ inputs.manual_assets_creation == '' }}; then
            git config --global user.name "cuda-quantum-bot"
            git config --global user.email "cuda-quantum-bot@users.noreply.github.com"
            current_branch=$(git rev-parse --abbrev-ref HEAD)
          else
            gh release download ${{ inputs.manual_assets_creation }} -R ${{ vars.assets_repo || github.repository }}
            unzip -d /tmp/ ${{ inputs.manual_assets_creation }}.zip && rm -rf ${{ inputs.manual_assets_creation }}.zip
          fi
          # $1: info file name, $2: image key to look up, $3: staging subfolder,
          # $4: JSON info blob produced by the "Download build info" step.
          function create_assets {
            release_id=`echo "$4" | jq -r ".\"$1\".release_id"`
            staging_branch=`echo "$4" | jq -r ".\"$1\".staging_branch"`
            artifacts_url=`echo "$4" | jq -r ".\"$1\".artifacts_url"`
            staging_folder="deployments/staging/$3"
            image_hash=`cat "$1.txt" | grep -o "$2: \S*" | cut -d ' ' -f 2`
            file_id=`echo $image_hash | rev | cut -d ':' -f 1 | rev`
            if [ -n "$(cat "$1.txt" | tail -1)" ]; then echo >> "$1.txt"; fi
            if ${{ inputs.nvidia_mgpu_commit != '' }}; then
              sed -i '/nvidia-mgpu-commit/d' "$1.txt"
              echo "nvidia-mgpu-commit: ${{ inputs.nvidia_mgpu_commit }}" >> "$1.txt"
            fi
            echo "asset-name: $1.txt" >> "$1.txt"
            echo "release-id: $release_id" >> "$1.txt"
            echo "artifacts-url: $artifacts_url" >> "$1.txt"
            if ${{ inputs.manual_assets_creation == '' }}; then
              echo "Pushing $1 to $staging_branch"
              mkdir -p "$staging_folder" && mv -v "$1.txt" "$staging_folder/$file_id"
              git add "$staging_folder" && git commit -m "$image_hash"
              git pull origin -- $staging_branch 2> /dev/null || true
              git push origin $current_branch:$staging_branch
            else
              if [ -z "$(gh release list -R ${{ vars.assets_repo || github.repository }} | grep -s $release_id)" ]; then
                versions=`gh release list -R ${{ vars.assets_repo || github.repository }} --exclude-drafts --exclude-pre-releases | egrep -o "([0-9]{1,}\.)+[0-9]{1,}\S*" | sort -r -V`
                latest_tag=`echo $versions | cut -d ' ' -f 1`
                rel_notes="This release draft is created by a publishing workflow with manual assets creation."
                rel_notes+=$(echo "<br/>GitHub commit [${{ steps.artifacts.outputs.github_commit }}](${{ github.repository }}/tree/${{ steps.artifacts.outputs.github_commit }})")
                echo "Creating draft release $release_id."
                # If we use a separate repo to store temporary assets, we can't use the github commit as the target
                # since it does not exist on that repo. However, these drafts are deleted again at the end of the workflow
                # and nothing relies on the target.
                gh release create $release_id --title $release_id -R ${{ vars.assets_repo || github.repository }} \
                  --target ${{ vars.assets_repo && 'main' || steps.artifacts.outputs.github_commit }} \
                  --draft --prerelease \
                  --generate-notes --notes-start-tag $latest_tag --notes "$rel_notes"
              else
                echo "::error::Release $release_id already exists."
                exit 1
              fi
              assets_folder=$(echo $release_id | cut -d _ -f2-)
              upload=`find /tmp/${{ inputs.manual_assets_creation }}/$assets_folder -name '*.zip'`
              echo "Uploading assets $upload $1.txt..."
              echo $upload | xargs gh release upload $release_id -R ${{ vars.assets_repo || github.repository }} --clobber "$1.txt"
            fi
          }
          for file in ${{ join(fromJson(steps.artifacts.outputs.docker_images).info_files, ' ') }}; do
            create_assets $file cuda-quantum-dev-image cuda-quantum-dev '${{ steps.artifacts.outputs.docker_images }}'
          done
          for file in ${{ join(fromJson(steps.artifacts.outputs.python_wheels).info_files, ' ') }}; do
            create_assets $file cuda-quantum-wheeldeps-image cuda-quantum-devdeps '${{ steps.artifacts.outputs.python_wheels }}'
          done
          for file in ${{ join(fromJson(steps.artifacts.outputs.installers).info_files, ' ') }}; do
            create_assets $file cuda-quantum-assets-image cuda-quantum-assets '${{ steps.artifacts.outputs.installers }}'
          done
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}

      - name: Wait for assets
        run: |
          for release_id in `echo ${{ steps.artifacts.outputs.releases }}`; do
            while [ -z "$(gh release list -R ${{ vars.assets_repo || github.repository }} | grep -s $release_id)" ];
            do echo "Waiting for assets $release_id ..." && sleep 300;
            done
          done
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}

      # We can delete staging branch now after the expected draft releases have been created.
      - name: Clean up
        if: steps.assets_creation.outcome != 'skipped' && inputs.manual_assets_creation == ''
        run: |
          # Clean up the staging branch that was used to trigger the GitLab pipeline.
          git config --global user.name "cuda-quantum-bot"
          git config --global user.email "cuda-quantum-bot@users.noreply.github.com"
          function delete_staging_branch {
            staging_branch=`echo "$2" | jq -r ".\"$1\".staging_branch"`
            echo "Delete staging branch $staging_branch ..."
            # Guard against deleting anything outside the bot/ namespace.
            if [[ "$staging_branch" =~ ^bot\/.*$ ]]; then
              git push origin --delete $staging_branch
            else
              echo "::error::Unexpected staging branch."
              exit 1
            fi
          }
          for info_file in ${{ join(fromJson(steps.artifacts.outputs.docker_images).info_files, ' ') }}; do
            delete_staging_branch $info_file '${{ steps.artifacts.outputs.docker_images }}'
          done
          for info_file in ${{ join(fromJson(steps.artifacts.outputs.python_wheels).info_files, ' ') }}; do
            delete_staging_branch $info_file '${{ steps.artifacts.outputs.python_wheels }}'
          done
          for info_file in ${{ join(fromJson(steps.artifacts.outputs.installers).info_files, ' ') }}; do
            delete_staging_branch $info_file '${{ steps.artifacts.outputs.installers }}'
          done
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}

      - name: Retrieve CUDA-Q assets
        id: assets_retrieval
        run: |
          assets_folder=/tmp/assets && mkdir "$assets_folder" && cd "$assets_folder"
          echo "artifact_name=downstream_assets_${{ github.run_id }}" >> $GITHUB_OUTPUT
          if ${{ steps.artifacts.outputs.docs_archive != '' }}; then
            unzip -d "documentation" ${{ steps.artifacts.outputs.docs_archive }}
          fi
          # Downloads the assets of release $1 (looked up in JSON blob $2) into
          # a subfolder named $1; retries until the info file itself appears.
          function download_assets {
            mkdir "$1" && cd "$1"
            release_id=`echo "$2" | jq -r ".\"$1\".release_id"`
            while ! [ -f "$1.txt" ]; do
              echo "Download $release_id assets..."
              (gh release download $release_id -R ${{ vars.assets_repo || github.repository }}) || true
              sleep 30
            done
            cd ..
          }
          for file in ${{ join(fromJson(steps.artifacts.outputs.docker_images).info_files, ' ') }}; do
            download_assets $file '${{ steps.artifacts.outputs.docker_images }}'
          done
          for file in ${{ join(fromJson(steps.artifacts.outputs.python_wheels).info_files, ' ') }}; do
            download_assets $file '${{ steps.artifacts.outputs.python_wheels }}'
          done
          for file in ${{ join(fromJson(steps.artifacts.outputs.installers).info_files, ' ') }}; do
            download_assets $file '${{ steps.artifacts.outputs.installers }}'
          done
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}

      - name: Upload CUDA-Q assets
        if: steps.artifacts.outputs.releases != ''
        uses: actions/upload-artifact@v4
        with:
          name: ${{ steps.assets_retrieval.outputs.artifact_name }}
          path: /tmp/assets
          retention-days: 1
          if-no-files-found: warn
cudaq_images: | |
name: CUDA-Q Docker image | |
if: ${{ toJson(fromJson(needs.assets.outputs.docker_images).info_files) != '[]' }} | |
needs: assets | |
runs-on: linux-amd64-cpu8 | |
permissions: | |
contents: read | |
packages: write | |
id-token: write | |
environment: | |
name: ghcr-deployment | |
url: ${{ vars.deployment_url }} | |
strategy: | |
matrix: | |
info_file: ${{ fromJson(needs.assets.outputs.docker_images).info_files }} | |
fail-fast: false | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
- name: Log in to DockerHub | |
uses: docker/login-action@v3 | |
with: | |
username: ${{ secrets.DOCKERHUB_USERNAME }} | |
password: ${{ secrets.DOCKERHUB_READONLY_TOKEN }} | |
- name: Log in to GitHub CR | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: ${{ github.actor }} | |
password: ${{ github.token }} | |
- name: Load docker assets | |
uses: actions/download-artifact@v4 | |
with: | |
name: ${{ needs.assets.outputs.retrieved_assets }} | |
path: /tmp/assets | |
- name: Retrieve assets | |
id: release_info | |
run: | | |
release_id=${{ fromJson(needs.assets.outputs.docker_images)[format('{0}', matrix.info_file)].release_id }} | |
assets_folder=assets && mkdir "$assets_folder" && cd "$assets_folder" | |
cp -r /tmp/assets/${{ matrix.info_file }}/. "$(pwd)" && rm -rf /tmp/assets | |
if ${{ github.event_name == 'workflow_dispatch' && inputs.include_docs }}; then | |
mkdir documentation && mv -v /tmp/assets/documentation/* documentation | |
fi | |
build_info=${{ matrix.info_file }}* | |
platforms=`cat $build_info | grep -o 'platforms: \S*' | cut -d ' ' -f 2` | |
cudaqbase_image=`cat $build_info | grep -o 'cuda-quantum-image: \S*' | cut -d ' ' -f 2` | |
cudaqdev_image=`cat $build_info | grep -o 'cuda-quantum-dev-image: \S*' | cut -d ' ' -f 2` | |
cudaqdevdeps_image=`cat $build_info | grep -o 'cuda-quantum-devdeps-image: \S*' | cut -d ' ' -f 2` | |
for file in `ls *zip`; do unzip "$file" && rm "$file"; done && cd - | |
docker pull $cudaqbase_image | |
base_tag=`docker inspect $cudaqbase_image --format='{{json .Config.Labels}}' | jq -r '."org.opencontainers.image.version"'` | |
image_title=`docker inspect $cudaqbase_image --format='{{json .Config.Labels}}' | jq -r '."org.opencontainers.image.title"'` | |
image_description=`docker inspect $cudaqbase_image --format='{{json .Config.Labels}}' | jq -r '."org.opencontainers.image.description"'` | |
docker image rm $cudaqbase_image | |
docker image prune --force | |
registry=`echo $cudaqbase_image | rev | cut -d / -f2- | rev` | |
push_to_ngc=`([ "$registry" == "${registry#nvcr.io}" ] && echo false) || echo true` | |
echo "release_id=$release_id" >> $GITHUB_OUTPUT | |
echo "push_to_ngc=$push_to_ngc" >> $GITHUB_OUTPUT | |
echo "image_name=$registry/${{ vars.packages_prefix }}cuda-quantum" >> $GITHUB_OUTPUT | |
echo "image_tag=${base_tag%-base}" >> $GITHUB_OUTPUT | |
echo "image_title=$image_title" >> $GITHUB_OUTPUT | |
echo "image_description=$image_description" >> $GITHUB_OUTPUT | |
echo "platforms=$platforms" >> $GITHUB_OUTPUT | |
echo "cudaqbase_image=$cudaqbase_image" >> $GITHUB_OUTPUT | |
echo "cudaqdev_image=$cudaqdev_image" >> $GITHUB_OUTPUT | |
echo "cudaqdevdeps_image=$cudaqdevdeps_image" >> $GITHUB_OUTPUT | |
echo "assets_folder=$assets_folder" >> $GITHUB_OUTPUT | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Log in to default registry | |
if: steps.release_info.outputs.push_to_ngc != 'true' | |
uses: docker/login-action@v3 | |
with: | |
registry: ${{ vars.registry }} | |
username: ${{ github.actor }} | |
password: ${{ github.token }} | |
- name: Log in to NGC registry | |
if: steps.release_info.outputs.push_to_ngc == 'true' | |
uses: docker/login-action@v3 | |
with: | |
registry: 'nvcr.io' | |
username: '$oauthtoken' | |
password: ${{ secrets.NGC_CREDENTIALS }} | |
- name: Set up buildx runner | |
uses: docker/setup-buildx-action@v3 | |
- name: Extract cuda-quantum metadata | |
id: metadata | |
uses: docker/metadata-action@v5 | |
with: | |
images: ${{ steps.release_info.outputs.image_name }} | |
flavor: latest=false | |
tags: type=raw,value=${{ steps.release_info.outputs.image_tag }} | |
labels: | | |
org.opencontainers.image.title=${{ steps.release_info.outputs.image_title }} | |
org.opencontainers.image.description=${{ steps.release_info.outputs.image_description }} | |
- name: Build cuda-quantum image | |
id: cudaq_build | |
uses: docker/build-push-action@v5 | |
with: | |
context: . | |
file: ./docker/release/cudaq.ext.Dockerfile | |
build-args: | | |
base_image=${{ steps.release_info.outputs.cudaqbase_image }} | |
assets=${{ steps.release_info.outputs.assets_folder }} | |
vscode_config=docker/release/config/.vscode | |
tags: ${{ steps.metadata.outputs.tags }} | |
labels: ${{ steps.metadata.outputs.labels }} | |
platforms: ${{ steps.release_info.outputs.platforms }} | |
provenance: false | |
push: true | |
- name: Install Cosign | |
uses: sigstore/cosign-installer@v3.3.0 | |
with: | |
cosign-release: 'v2.2.2' | |
- name: Sign image with GitHub OIDC Token | |
if: steps.release_info.outputs.push_to_ngc != 'true' | |
env: | |
DIGEST: ${{ steps.cudaq_build.outputs.digest }} | |
TAGS: ${{ steps.metadata.outputs.tags }} | |
run: cosign sign --yes --recursive "${TAGS}@${DIGEST}" | |
- name: Install NGC CLI | |
if: steps.release_info.outputs.push_to_ngc == 'true' | |
uses: ./.github/actions/install-ngc-cli | |
with: | |
version: 3.31.0 | |
checksum: b715e503e2c0b44814a51f330eafd605f5d240ea0987bf615700d359c993f138 | |
- name: Sign image with NGC CLI | |
if: steps.release_info.outputs.push_to_ngc == 'true' | |
env: | |
TAGS: ${{ steps.metadata.outputs.tags }} | |
NGC_CLI_API_KEY: ${{ secrets.NGC_CREDENTIALS }} | |
NGC_CLI_ORG: ${{ github.repository_owner }} | |
NGC_CLI_TEAM: 'nightly' | |
run: | | |
echo "Signing ${TAGS}" | |
ngc-cli/ngc registry image publish --source ${TAGS} ${TAGS} --sign | |
- name: Update release information | |
run: | | |
release_id=${{ steps.release_info.outputs.release_id }} | |
gh release view $release_id -R ${{ vars.assets_repo || github.repository }} --json body --jq .body > rel_notes.txt | |
header_length=`cat rel_notes.txt | grep -n "Release notes generated" | cut -d ':' -f 1` | |
head -n $(($header_length - 1)) rel_notes.txt > new_notes.txt | |
echo -e "\nImages for ${{ steps.release_info.outputs.platforms }}:\n" >> new_notes.txt | |
echo "- cuda-quantum (base image): ${{ steps.release_info.outputs.cudaqbase_image }}" >> new_notes.txt | |
echo "- cuda-quantum (with hpc components): ${{ steps.release_info.outputs.image_name }}@${{ steps.cudaq_build.outputs.digest }}" >> new_notes.txt | |
echo "- cuda-quantum-dev (for extension development): ${{ steps.release_info.outputs.cudaqdev_image }}" >> new_notes.txt | |
echo "- cuda-quantum-devdeps (development dependencies only): ${{ steps.release_info.outputs.cudaqdevdeps_image }}" >> new_notes.txt | |
(echo && tail -n +$header_length rel_notes.txt) >> new_notes.txt | |
gh release edit $release_id -R ${{ vars.assets_repo || github.repository }} --notes-file new_notes.txt | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Configure validation | |
uses: cloudposse/github-action-matrix-outputs-write@1.0.0 | |
with: | |
matrix-step-name: docker_images | |
matrix-key: ${{ matrix.info_file }} | |
outputs: | | |
image_hash: ${{ steps.release_info.outputs.image_name }}@${{ steps.cudaq_build.outputs.digest }} | |
cudaq_installers: | |
name: CUDA-Q installer | |
if: ${{ toJson(fromJson(needs.assets.outputs.installers).info_files) != '[]' }} | |
needs: assets | |
permissions: | |
contents: read | |
packages: read | |
id-token: write | |
environment: | |
name: ghcr-deployment | |
url: ${{ vars.deployment_url }} | |
strategy: | |
matrix: | |
info_file: ${{ fromJson(needs.assets.outputs.installers).info_files }} | |
fail-fast: false | |
runs-on: ${{ (contains(matrix.info_file, 'arm') && 'linux-arm64-cpu8') || 'linux-amd64-cpu8' }} | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
- name: Load build assets | |
uses: actions/download-artifact@v4 | |
with: | |
name: ${{ needs.assets.outputs.retrieved_assets }} | |
path: /tmp/assets | |
- name: Retrieve assets | |
id: release_info | |
run: | | |
release_id=${{ fromJson(needs.assets.outputs.installers)[format('{0}', matrix.info_file)].release_id }} | |
assets_folder=assets && mkdir "$assets_folder" && cd "$assets_folder" | |
cp -r /tmp/assets/${{ matrix.info_file }}/. "$(pwd)" | |
build_info=${{ matrix.info_file }}* | |
platform=`cat $build_info | grep -o 'platform: \S*' | cut -d ' ' -f 2` | |
platform_id=`echo $platform | sed 's/linux\///g' | tr -d ' '` | |
platform_arch=`([ "$platform_id" == "amd64" ] && echo x86_64) || ([ "$platform_id" == "arm64" ] && echo aarch64) || echo any` | |
cuda_version=`cat $build_info | grep -o 'cuda-version: \S*' | cut -d ' ' -f 2` | |
assets_image=`cat $build_info | grep -o 'cuda-quantum-assets-image: \S*' | cut -d ' ' -f 2` | |
platform_base_image=`cat $build_info | grep -o 'platform-base-image: \S*' | cut -d ' ' -f 2` | |
openmpi_buildcache=`cat $build_info | grep -o 'openmpi-build-cache: \S*' | cut -d ' ' -f 2` | |
for file in `ls *zip`; do unzip "$file" && rm "$file"; done && cd - | |
echo "release_id=$release_id" >> $GITHUB_OUTPUT | |
echo "platform=$platform" >> $GITHUB_OUTPUT | |
echo "platform_arch=$platform_arch" >> $GITHUB_OUTPUT | |
echo "cuda_major_version=$(echo $cuda_version | cut -d . -f1)" >> $GITHUB_OUTPUT | |
echo "assets_image=$assets_image" >> $GITHUB_OUTPUT | |
echo "platform_base_image=$platform_base_image" >> $GITHUB_OUTPUT | |
echo "openmpi_buildcache=$openmpi_buildcache" >> $GITHUB_OUTPUT | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Log in to DockerHub | |
uses: docker/login-action@v3 | |
with: | |
username: ${{ secrets.DOCKERHUB_USERNAME }} | |
password: ${{ secrets.DOCKERHUB_READONLY_TOKEN }} | |
- name: Log in to GitHub CR | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: ${{ github.actor }} | |
password: ${{ github.token }} | |
- name: Set up buildx runner | |
uses: docker/setup-buildx-action@v3 | |
- name: Build installer | |
uses: docker/build-push-action@v5 | |
with: | |
context: . | |
file: ./docker/release/installer.Dockerfile | |
build-args: | | |
base_image=${{ steps.release_info.outputs.assets_image }} | |
additional_components=assets | |
platforms: ${{ steps.release_info.outputs.platform }} | |
outputs: type=local,dest=/tmp/install | |
- name: Update release information | |
run: | | |
release_id=${{ steps.release_info.outputs.release_id }} | |
gh release view $release_id -R ${{ vars.assets_repo || github.repository }} --json body --jq .body > rel_notes.txt | |
header_length=`cat rel_notes.txt | grep -n "Release notes generated" | cut -d ':' -f 1` | |
head -n $(($header_length - 1)) rel_notes.txt > new_notes.txt | |
echo -e "\nImage to create ${{ steps.release_info.outputs.platform }} installer for CUDA ${{ steps.release_info.outputs.cuda_major_version }}:\n" >> new_notes.txt | |
echo "- cuda-quantum-assets: ${{ steps.release_info.outputs.assets_image }}" >> new_notes.txt | |
(echo && tail -n +$header_length rel_notes.txt) >> new_notes.txt | |
gh release edit $release_id -R ${{ vars.assets_repo || github.repository }} --notes-file new_notes.txt | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Upload installer | |
uses: actions/upload-artifact@v4 | |
with: | |
name: ${{ steps.release_info.outputs.platform_arch }}-cu${{ steps.release_info.outputs.cuda_major_version }}-installer | |
path: /tmp/install | |
retention-days: 1 | |
if-no-files-found: error | |
cudaq_wheels: | |
name: CUDA-Q Python wheels | |
if: ${{ toJson(fromJson(needs.assets.outputs.python_wheels).info_files) != '[]' }} | |
needs: assets | |
permissions: | |
contents: read | |
packages: read | |
environment: | |
name: ghcr-deployment | |
url: ${{ vars.deployment_url }} | |
strategy: | |
matrix: | |
info_file: ${{ fromJson(needs.assets.outputs.python_wheels).info_files }} | |
python_version: ['3.10', '3.11', '3.12'] | |
fail-fast: false | |
runs-on: ${{ (contains(matrix.info_file, 'arm') && 'linux-arm64-cpu8') || 'linux-amd64-cpu8' }} | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
- name: Load wheel assets | |
uses: actions/download-artifact@v4 | |
with: | |
name: ${{ needs.assets.outputs.retrieved_assets }} | |
path: /tmp/assets | |
- name: Retrieve assets | |
id: release_info | |
run: | | |
release_id=${{ fromJson(needs.assets.outputs.python_wheels)[format('{0}', matrix.info_file)].release_id }} | |
assets_folder=$(pwd)/assets && mkdir "$assets_folder" | |
cd /tmp/assets/${{ matrix.info_file }} | |
build_info=${{ matrix.info_file }}* | |
platform=`cat $build_info | grep -o 'platform: \S*' | cut -d ' ' -f 2` | |
platform_id=`echo $platform | sed 's/linux\///g' | tr -d ' '` | |
platform_arch=`([ "$platform_id" == "amd64" ] && echo x86_64) || ([ "$platform_id" == "arm64" ] && echo aarch64) || echo any` | |
cuda_version=`cat $build_info | grep -o 'cuda-version: \S*' | cut -d ' ' -f 2` | |
cudaqwheeldeps_image=`cat $build_info | grep -o 'cuda-quantum-wheeldeps-image: \S*' | cut -d ' ' -f 2` | |
for file in `ls *zip`; do unzip -j "$file" -d "$assets_folder" && rm "$file"; done | |
echo "release_id=$release_id" >> $GITHUB_OUTPUT | |
echo "platform=$platform" >> $GITHUB_OUTPUT | |
echo "platform_arch=$platform_arch" >> $GITHUB_OUTPUT | |
echo "cuda_major_version=$(echo $cuda_version | cut -d . -f1)" >> $GITHUB_OUTPUT | |
echo "cudaqwheeldeps_image=$cudaqwheeldeps_image" >> $GITHUB_OUTPUT | |
echo "assets_folder=$assets_folder" >> $GITHUB_OUTPUT | |
echo "docker_output=type=local,dest=/tmp/wheels" >> $GITHUB_OUTPUT | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Log in to DockerHub | |
uses: docker/login-action@v3 | |
with: | |
username: ${{ secrets.DOCKERHUB_USERNAME }} | |
password: ${{ secrets.DOCKERHUB_READONLY_TOKEN }} | |
- name: Log in to GitHub CR | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: ${{ github.actor }} | |
password: ${{ github.token }} | |
- name: Set up context for buildx | |
run: | | |
docker context create builder_context | |
- name: Set up buildx runner | |
uses: docker/setup-buildx-action@v3 | |
with: | |
endpoint: builder_context | |
driver-opts: network=host | |
- name: Build cuda-quantum wheel | |
id: build_wheel | |
uses: docker/build-push-action@v5 | |
with: | |
context: . | |
file: ./docker/release/cudaq.wheel.Dockerfile | |
build-args: | | |
base_image=${{ steps.release_info.outputs.cudaqwheeldeps_image }} | |
release_version=${{ needs.assets.outputs.cudaq_version }} | |
python_version=${{ matrix.python_version }} | |
outputs: ${{ steps.release_info.outputs.docker_output }} | |
- name: Set retention days | |
id: set_retention_days | |
run: | | |
# Save the x86_64-py3.10-wheels for longer because our some of our | |
# nightly jobs rely on them being present, even if we haven't done a | |
# fresh publishing in the last 24 hours. | |
if [ "${{ steps.release_info.outputs.platform_arch }}" == "x86_64" ] && [ "${{ matrix.python_version }}" == "3.10" ]; then | |
echo "retention_days=7" >> $GITHUB_OUTPUT | |
else | |
echo "retention_days=1" >> $GITHUB_OUTPUT | |
fi | |
- name: Upload wheels | |
uses: actions/upload-artifact@v4 | |
with: | |
name: ${{ steps.release_info.outputs.platform_arch }}-cu${{ steps.release_info.outputs.cuda_major_version }}-py${{ matrix.python_version }}-wheels | |
path: /tmp/wheels | |
retention-days: ${{ steps.set_retention_days.outputs.retention_days }} | |
if-no-files-found: error | |
cudaq_wheels_release_info: | |
name: Update release info of CUDA-Q Python wheels | |
needs: [assets, cudaq_wheels] | |
runs-on: ubuntu-latest | |
strategy: | |
matrix: | |
info_file: ${{ fromJson(needs.assets.outputs.python_wheels).info_files }} | |
fail-fast: false | |
steps: | |
- name: Update release information | |
run: | | |
release_id=${{ fromJson(needs.assets.outputs.python_wheels)[format('{0}', matrix.info_file)].release_id }} | |
platform=${{ fromJson(needs.assets.outputs.python_wheels)[format('{0}', matrix.info_file)].platform }} | |
cudaqwheeldeps_image=${{ fromJson(needs.assets.outputs.python_wheels)[format('{0}', matrix.info_file)].image_hash }} | |
gh release view $release_id -R ${{ vars.assets_repo || github.repository }} --json body --jq .body > rel_notes.txt | |
header_length=`cat rel_notes.txt | grep -n "Release notes generated" | cut -d ':' -f 1` | |
head -n $(($header_length - 1)) rel_notes.txt > new_notes.txt | |
echo -e "\nImage to create $platform wheels:\n" >> new_notes.txt | |
echo "- cuda-quantum-devdeps (development dependencies only): $cudaqwheeldeps_image" >> new_notes.txt | |
(echo && tail -n +$header_length rel_notes.txt) >> new_notes.txt | |
gh release edit $release_id -R ${{ vars.assets_repo || github.repository }} --notes-file new_notes.txt | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
  # Builds the CUDA-Q Python metapackages via the reusable workflow, from the
  # wheel artifacts produced by the cudaq_wheels job.
  cudaq_metapackages:
    name: CUDA-Q metapackages
    needs: [assets, cudaq_wheels]
    uses: ./.github/workflows/python_metapackages.yml
    with:
      cudaq_version: ${{ needs.assets.outputs.cudaq_version }}
      python_versions: "['3.10', '3.11', '3.12']"
      # '' selects the CUDA-version-autodetecting metapackage.
      cuda_versions: "['', '11.8', '12.0']"
      # Glob matching the per-platform wheel artifacts uploaded earlier.
      wheel_artifacts: '*-wheels'
      github_commit: ${{ inputs.github_commit || needs.assets.outputs.github_commit }}
  # Collects the per-matrix image hashes written by the cudaq_images job and
  # exposes the non-empty ones as a JSON array for the validation matrix below.
  image_validation_config:
    name: Configure image validation
    needs: cudaq_images
    runs-on: ubuntu-latest
    outputs:
      docker_images: ${{ steps.validation.outputs.docker_images }}
    steps:
      - name: Get matrix job output
        id: read_json
        uses: cloudposse/github-action-matrix-outputs-read@1.0.0
        with:
          matrix-step-name: docker_images
      - name: Prepare validation
        id: validation
        run: |
          # Drop empty entries (images that were not built in this run).
          images=`echo '${{ steps.read_json.outputs.result }}' | jq '[.image_hash[] | select(. != "")]'`
          echo "docker_images=$(echo $images)" >> $GITHUB_OUTPUT
image_validation: | |
name: Docker image validation | |
needs: [assets, image_validation_config] | |
runs-on: linux-amd64-gpu-v100-latest-1 | |
permissions: | |
contents: read | |
packages: read | |
strategy: | |
matrix: | |
image_hash: ${{ fromJson(needs.image_validation_config.outputs.docker_images) }} | |
fail-fast: false | |
container: | |
image: ${{ matrix.image_hash }} | |
credentials: | |
username: ${{ github.actor }} | |
password: ${{ github.token }} | |
options: --user root # otherwise step summary doesn't work | |
env: | |
NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }} | |
TERM: xterm | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
path: github-repo | |
sparse-checkout: | | |
scripts | |
docs | |
- name: Basic validation (GPU backends) | |
shell: bash | |
run: | | |
backends_to_test=`\ | |
for file in $(ls $CUDA_QUANTUM_PATH/targets/*.yml); \ | |
do | |
if [ -n "$(cat $file | grep "gpu-requirements")" ]; then \ | |
basename $file | cut -d "." -f 1; \ | |
elif [ -n "$(basename $file | grep mqpu)" ]; then \ | |
echo remote-mqpu; \ | |
fi; \ | |
done` | |
rm -rf examples applications targets && mv github-repo/docs/sphinx/examples examples && mv github-repo/docs/sphinx/applications applications && mv github-repo/docs/sphinx/targets targets | |
mv github-repo/docs/notebook_validation.py . | |
GITHUB_STEP_SUMMARY=$GITHUB_STEP_SUMMARY \ | |
bash github-repo/scripts/validate_container.sh $backends_to_test | tee /tmp/validation.out | |
# Check that the tests included the nvidia-mgpu backend: | |
relevant_line=`grep -n "Testing backends:" /tmp/validation.out | cut -d : -f1` | |
tested_backends=`cat /tmp/validation.out | tail -n +$relevant_line | sed -e '/^$/,$d'` | |
if [ -z "$(echo $tested_backends | grep nvidia-mgpu)" ]; then | |
echo "::error::Missing tests for nvidia-mgpu backend." | |
exit 1 | |
fi | |
- name: MPI validation | |
shell: bash | |
run: | | |
status_sum=0 && set +e # Allow script to keep going through errors | |
for ex in `find /home/cudaq/examples/other/distributed/ -name '*.cpp'`; do | |
# Set CUDAQ_ENABLE_MPI_EXAMPLE to activate these examples. | |
nvq++ -DCUDAQ_ENABLE_MPI_EXAMPLE=1 $ex | |
status=$? | |
if [ $status -eq 0 ]; then | |
# Run with mpiexec | |
mpiexec --allow-run-as-root -np 4 ./a.out | |
status=$? | |
filename=$(basename -- "$ex") | |
if [ $status -eq 0 ]; then | |
echo ":white_check_mark: Successfully ran $filename." >> $GITHUB_STEP_SUMMARY | |
else | |
echo ":x: Failed to execute $filename." >> $GITHUB_STEP_SUMMARY | |
status_sum=$((status_sum+1)) | |
fi | |
else | |
echo ":x: Compilation failed for $filename." >> $GITHUB_STEP_SUMMARY | |
status_sum=$((status_sum+1)) | |
fi | |
done | |
set -e # Re-enable exit code error checking | |
if [ ! $status_sum -eq 0 ]; then | |
echo "::error::$status_sum examples failed; see step summary for a list of failures." | |
exit $status_sum | |
fi | |
  # Validates the standalone x86_64 installer on clean Ubuntu and RHEL
  # containers, for both supported CUDA major versions.
  installer_validation:
    name: Installer validation
    needs: [assets, cudaq_installers]
    runs-on: linux-amd64-gpu-v100-latest-1
    permissions:
      contents: read
    strategy:
      matrix:
        os_image: ['redhat/ubi8:8.0', 'ubuntu:22.04']
        cuda_version: ['11.8', '12.0']
      fail-fast: false
    container:
      # Bare OS image - all runtime dependencies are installed by the steps
      # below to mimic a fresh end-user system.
      image: ${{ matrix.os_image }}
      options: --user root
      env:
        NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }}
        TERM: xterm
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }}
      - name: Configure validation
        id: config
        run: |
          # Extract the CUDA major version used to pick the installer artifact.
          cuda_major=`echo ${{ matrix.cuda_version }} | cut -d . -f1`
          echo "cuda_major=$cuda_major" >> $GITHUB_OUTPUT
      - name: Load installer
        uses: actions/download-artifact@v4
        with:
          name: x86_64-cu${{ steps.config.outputs.cuda_major }}-installer
          path: /tmp/install
      - name: Runtime dependencies (apt)
        if: startsWith(matrix.os_image, 'ubuntu')
        run: |
          apt-get update && apt-get install -y --no-install-recommends \
            wget ca-certificates libc6-dev libopenmpi-dev
          # Derive the NVIDIA repo path from the image name, e.g.
          # ubuntu:22.04 -> ubuntu2204.
          distro=`echo ${{ matrix.os_image }} | tr -d . | tr -d :`
          CUDA_DOWNLOAD_URL=https://developer.download.nvidia.com/compute/cuda/repos
          wget "${CUDA_DOWNLOAD_URL}/$distro/x86_64/cuda-keyring_1.1-1_all.deb"
          dpkg -i cuda-keyring_1.1-1_all.deb
          cuda_version_suffix="$(echo ${{ matrix.cuda_version }} | tr . -)"
          apt-get update && apt-get install -y --no-install-recommends \
            libcublas-$cuda_version_suffix \
            cuda-cudart-$cuda_version_suffix \
            libcusolver-$cuda_version_suffix
          # nvJitLink only exists for CUDA 12+.
          if [ $(echo ${{ matrix.cuda_version }} | cut -d . -f1) -gt 11 ]; then
            apt-get install -y --no-install-recommends \
              libnvjitlink-$cuda_version_suffix
          fi
      - name: Runtime dependencies (dnf)
        if: startsWith(matrix.os_image, 'redhat')
        run: |
          dnf install -y --nobest --setopt=install_weak_deps=False \
            'dnf-command(config-manager)' glibc-devel openssh-clients
          # configure_build.sh reads CUDA_VERSION/DISTRIBUTION to install cudart.
          CUDA_VERSION=${{ matrix.cuda_version }}
          DISTRIBUTION=rhel8
          . scripts/configure_build.sh install-cudart
          # We need to install an MPI implementation, otherwise nothing
          # will be able to run on the nvidia-mgpu backend.
          # The easiest is to just install it via conda.
          dnf install -y --nobest --setopt=install_weak_deps=False wget
          mkdir -p ~/.miniconda3
          wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-$(uname -m).sh -O ~/.miniconda3/miniconda.sh
          bash ~/.miniconda3/miniconda.sh -b -u -p ~/.miniconda3
          rm -rf ~/.miniconda3/miniconda.sh
          eval "$(~/.miniconda3/bin/conda shell.bash hook)"
          conda install -y -c conda-forge openmpi
          # Persist conda/OpenMPI setup for the later bash -l invocation.
          echo 'eval "$(~/.miniconda3/bin/conda shell.bash hook)"' >> ~/.bashrc
          echo 'export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$CONDA_PREFIX/lib"' >> ~/.bashrc
          echo "export OMPI_MCA_opal_cuda_support=true OMPI_MCA_btl='^openib'" >> ~/.bashrc
      - name: Install and run sanity checks
        shell: bash
        env:
          BASH_ENV: ~/.bashrc
        run: |
          installer=install_cuda_quantum_cu$(echo ${{ matrix.cuda_version }} | cut -d . -f1).x86_64
          chmod +x /tmp/install/$installer
          /tmp/install/$installer --accept
          # Use the examples from the checked-out commit; drop the Python
          # variants since the installer ships the C++ toolchain only.
          rm -rf examples applications targets
          mv docs/sphinx/examples examples && mv docs/sphinx/applications applications && mv docs/sphinx/targets targets
          rm -rf examples/python && rm -rf applications/python && rm -rf targets/python
          GITHUB_STEP_SUMMARY=$GITHUB_STEP_SUMMARY \
          bash -l scripts/validate_container.sh | tee /tmp/validation.out
          # Check that the tests included the nvidia-mgpu backend:
          relevant_line=`grep -n "Testing backends:" /tmp/validation.out | cut -d : -f1`
          tested_backends=`cat /tmp/validation.out | tail -n +$relevant_line | sed -e '/^$/,$d'`
          if [ -z "$(echo $tested_backends | grep nvidia-mgpu)" ]; then
            echo "::error::Missing tests for nvidia-mgpu backend."
            exit 1
          fi
  # Validates that the wheels/metapackage install via plain pip (no conda, no
  # MPI) and that the non-MPI GPU targets run. The '' matrix entry exercises
  # the cudaq metapackage's CUDA autodetection.
  wheel_validation_piponly:
    name: Wheel validation, pip only
    needs: [assets, cudaq_wheels, cudaq_metapackages]
    runs-on: linux-amd64-gpu-v100-latest-1
    permissions:
      contents: read
    strategy:
      matrix:
        cuda_major: ['', '11', '12']
      fail-fast: false
    container:
      image: ubuntu:22.04
      options: --user root
      env:
        NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }}
        TERM: xterm
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }}
      - name: Load wheels
        uses: actions/download-artifact@v4
        with:
          pattern: '*-wheels'
          path: /tmp/wheels
          merge-multiple: true
      - name: Load metapackage
        if: ${{ matrix.cuda_major == '' }}
        uses: actions/download-artifact@v4
        with:
          name: ${{ needs.cudaq_metapackages.outputs.artifact_name }}
          path: /tmp/packages
      - name: Run x86 validation
        shell: bash
        run: |
          # These simple steps are only expected to work for x86 and only for
          # targets and test cases that don't require MPI.
          # Create clean python3 environment.
          apt-get update && apt-get install -y --no-install-recommends python3 python3-pip
          mkdir -p /tmp/packages && mv /tmp/wheels/* /tmp/packages && rmdir /tmp/wheels
          # Serve the local packages through a throwaway pypi-server so pip
          # resolves them like a real index.
          python3 -m pip install pypiserver
          server=`find / -name pypi-server -executable -type f`
          $server run -p 8080 /tmp/packages &
          if [ -n "${{ matrix.cuda_major }}" ]; then
            pip install cuda-quantum-cu${{ matrix.cuda_major }} -v \
              --extra-index-url http://localhost:8080
          else
            # Metapackage path: verify CUDA autodetection picked a binary.
            pip install --upgrade pip
            pip install cudaq -v \
              --extra-index-url http://localhost:8080 \
              2>&1 | tee /tmp/install.out
            if [ -z "$(cat /tmp/install.out | grep -o 'Autodetection succeeded')" ]; then
              echo "::error::Autodetection to determine cudaq binary distribution failed."
              exit 1
            fi
          fi
          status_sum=0
          set +e # Allow script to keep going through errors
          # Verify that the necessary GPU targets are installed and usable
          # Note nvidia-mgpu requires MPI, so it is not available with this method.
          for tgt in nvidia nvidia-fp64 tensornet; do
            echo "Running with target ${tgt}"
            python3 docs/sphinx/examples/python/intro.py --target ${tgt}
            if [ $? -ne 0 ]; then
              echo -e "\e[01;31mPython trivial test for target ${tgt} failed.\e[0m" >&2
              status_sum=$((status_sum+1))
            fi
          done
          set -e # Re-enable exit code error checking
          if [ "$status_sum" -ne "0" ]; then
            echo "::error::Error running validation script"
            exit $status_sum
          fi
  # Validates the cudaq metapackage inside a conda environment using the
  # validate_pycudaq.sh script, for both CUDA major versions.
  metapackage_validation_conda:
    name: Python metapackage validation, conda environment
    needs: [assets, cudaq_wheels, cudaq_metapackages]
    runs-on: linux-amd64-gpu-v100-latest-1
    permissions:
      contents: read
    strategy:
      matrix:
        cuda_version: ['11.8', '12.4']
      fail-fast: false
    container:
      image: ubuntu:22.04
      options: --user root
      env:
        NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }}
        TERM: xterm
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }}
      - name: Load wheels
        uses: actions/download-artifact@v4
        with:
          pattern: '*-wheels'
          path: /tmp/wheels
          merge-multiple: true
      - name: Load metapackage
        uses: actions/download-artifact@v4
        with:
          name: ${{ needs.cudaq_metapackages.outputs.artifact_name }}
          path: /tmp/packages
      - name: Run validation
        shell: bash
        run: |
          apt-get update && apt-get install -y --no-install-recommends \
            ca-certificates vim wget unzip openssh-client
          mv /tmp/wheels/* /tmp/packages && rmdir /tmp/wheels
          # Extract README from metapackage
          cudaq_metapackage=cudaq-${{ needs.assets.outputs.cudaq_version }}
          tar xf /tmp/packages/${cudaq_metapackage}.tar.gz && mv -v ${cudaq_metapackage}/README.md .
          rm -rf ${cudaq_metapackage} && readme=README.md
          # Setup links for validate_pycudaq.sh script
          ln -s $GITHUB_WORKSPACE/scripts/validate_pycudaq.sh .
          ln -s $GITHUB_WORKSPACE/docs/sphinx/examples/python /tmp/examples
          ln -s $GITHUB_WORKSPACE/docs/sphinx/applications/python /tmp/applications
          ln -s $GITHUB_WORKSPACE/docs/sphinx/targets/python /tmp/targets
          ln -s $GITHUB_WORKSPACE/docs/sphinx/snippets/python /tmp/snippets
          ln -s $GITHUB_WORKSPACE/python/tests /tmp/tests
          ln -s $GITHUB_WORKSPACE/$readme /tmp/README.md
          # Run the script w/ -q to run a shortened test
          set +e # Allow script to keep going through errors (needed for skipped tests)
          source validate_pycudaq.sh \
            -v ${{ needs.assets.outputs.cudaq_version }} \
            -i /tmp/packages -f /tmp \
            -c ${{ matrix.cuda_version }} -p 3.10
          set -e # Re-enable exit code error checking
          # Confirm the metapackage resolved to the matching binary wheel.
          expected_dependency=cuda-quantum-cu$(echo ${{ matrix.cuda_version }} | cut -d . -f1)
          if [ -z "$(python3 -m pip list | grep ${expected_dependency})" ]; then
            echo "::error::Missing installation of ${expected_dependency} package."
            exit 1
          fi
          # NOTE(review): status_sum is not assigned anywhere in this step;
          # presumably the sourced validate_pycudaq.sh sets it - confirm. If it
          # doesn't, this test expression errors out under an unset variable.
          if [ "$status_sum" -ne "0" ]; then
            echo "::error::Error running validation script"
            exit $status_sum
          fi
  # Creates the final (draft) GitHub release with installers, wheels, and
  # metapackages attached. Only runs for real releases, i.e. not for runs that
  # overrode commits or reused assets from a prior run.
  create_release:
    name: CUDA-Q Release
    needs: [assets, cudaq_images, cudaq_installers, cudaq_wheels, cudaq_metapackages]
    if: needs.assets.outputs.release_title && inputs.github_commit == '' && inputs.assets_from_run == '' && inputs.nvidia_mgpu_commit == ''
    runs-on: ubuntu-latest
    environment:
      name: ghcr-deployment
      url: ${{ vars.deployment_url }}
    steps:
      - name: Download CUDA-Q installer
        uses: actions/download-artifact@v4
        with:
          pattern: '*-installer'
          path: installers
      - name: Download CUDA-Q Python wheels
        uses: actions/download-artifact@v4
        with:
          pattern: '*-wheels'
          path: wheelhouse
      - name: Download CUDA-Q metapackages
        uses: actions/download-artifact@v4
        with:
          name: ${{ needs.cudaq_metapackages.outputs.artifact_name }}
          path: metapackages
      # The python wheels are uploaded as a release asset, but not pushed to anywhere else.
      # Note that PyPI packages cannot be updated once pushed;
      # - We could upload wheels to test-pypi when creating a release.
      # - The push to pypi itself should be done manually.
      # See also:
      # https://github.com/pypa/gh-action-pypi-publish
      # https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
      - name: Create release
        run: |
          # Flatten the downloaded artifact directories into release assets.
          for installer in `find installers/ -type f -not -name '*.whl'`; do
            mv -v "$installer" "$(basename -- "$installer")"
          done
          for dir in `ls wheelhouse/`; do
            mv -v "wheelhouse/$dir"/* wheelhouse/ && rmdir "wheelhouse/$dir"
          done
          zip -r wheelhouse.zip wheelhouse
          zip -r metapackages.zip metapackages
          release_id=${{ inputs.assets_from_run || github.run_id }}
          release_title="${{ needs.assets.outputs.release_title }}"
          github_commit=${{ needs.assets.outputs.github_commit }}
          version=${{ needs.assets.outputs.release_version }}
          # Find the most recent published version to anchor the generated notes.
          versions=`gh release list -R ${{ vars.assets_repo || github.repository }} --exclude-drafts --exclude-pre-releases | egrep -o "([0-9]{1,}\.)+[0-9]{1,}\S*" | sort -r -V`
          latest_tag=`echo $versions | cut -d ' ' -f 1`
          # Anything other than a plain X.Y.Z version is marked as prerelease.
          prerelease=`([ "$(echo $version | egrep -o '([0-9]{1,}\.)+[0-9]{1,}')" == "$version" ] && echo '') || echo '--prerelease'`
          rel_notes="Release created by workflow [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})."
          rel_notes+=$(echo "<br/>GitHub commit [$github_commit](https://github.com/${{ github.repository }}/tree/$github_commit)")
          # Create under the run id first, then retitle/retag; stays a draft.
          gh release create $release_id --title $release_id -R ${{ github.repository }} \
            --target $github_commit --draft $prerelease \
            --generate-notes --notes-start-tag $latest_tag --notes "$rel_notes"
          gh release upload $release_id -R ${{ github.repository }} install_cuda_quantum* --clobber
          gh release upload $release_id -R ${{ github.repository }} wheelhouse.zip --clobber
          gh release upload $release_id -R ${{ github.repository }} metapackages.zip --clobber
          gh release edit $release_id -R ${{ github.repository }} \
            --title "$release_title" --tag $version $prerelease # --draft=false
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}
  # Deletes the intermediate draft releases and the retrieved-assets artifact
  # once all publishing and validation jobs have finished successfully.
  clean_up:
    name: Clean up
    needs: [assets, cudaq_images, cudaq_installers, cudaq_wheels, cudaq_metapackages, image_validation, installer_validation, metapackage_validation_conda, wheel_validation_piponly, create_release]
    # Force this job to run even when some of the dependencies above are skipped.
    if: always() && !cancelled() && needs.assets.result != 'skipped' && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled')
    runs-on: ubuntu-latest
    environment:
      name: ghcr-deployment
      url: ${{ vars.deployment_url }}
    steps:
      - name: Clean up draft releases
        id: cleanup
        run: |
          # Delete the draft release(s) used to exchange data between GitLab and GitHub.
          for release_id in `echo ${{ needs.assets.outputs.releases }}`; do
            if [ -n "$(gh release list -R ${{ vars.assets_repo || github.repository }} | grep -s $release_id)" ]; then
              # Preserve the notes header in the step summary before deleting.
              echo "## Draft Release $release_id:" >> $GITHUB_STEP_SUMMARY
              gh release view $release_id -R ${{ vars.assets_repo || github.repository }} --json body --jq .body > rel_notes.txt
              header_length=`cat rel_notes.txt | grep -n "Release notes generated" | cut -d ':' -f 1`
              head -n $(($header_length - 1)) rel_notes.txt >> $GITHUB_STEP_SUMMARY
              gh release delete $release_id -R ${{ vars.assets_repo || github.repository }} -y
              echo "Deleted release $release_id."
            fi
          done
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}
      - name: Delete artifacts
        uses: actions/github-script@v7
        with:
          script: |
            // Remove only the artifact created to pass retrieved assets
            // between jobs of this workflow.
            const res = await github.rest.actions.listArtifactsForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
            })
            res.data.artifacts
              .filter(({ name }) => name === '${{ needs.assets.outputs.retrieved_assets }}')
              .forEach(({ id }) => {
                github.rest.actions.deleteArtifact({
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  artifact_id: id,
                })
              })