# Publishing workflow (see PR #3358).
# NOTE(review): this file was captured from a web view; if the original
# contains bidirectional Unicode characters, review it in an editor that
# reveals hidden Unicode before trusting what is rendered here.
# Triggers: manual dispatch (with optional overrides), or automatically after
# a successful run of the "Deployments" workflow.
on:
  workflow_dispatch:
    inputs:
      deployment_id:
        type: string
        description: The workflow id of the deployment that triggered the release creation.
        required: true
      assets_from_run:
        type: string
        description: Optional argument to take assets from a prior run of this workflow; facilitates rerunning a failed workflow without re-building the assets.
        required: false
      github_commit:
        type: string
        description: Optional argument to set the GitHub commit to use for the final build and validation.
        required: false
      nvidia_mgpu_commit:
        type: string
        description: Optional argument to set the GitLab commit to use for the nvidia-mgpu target.
        required: false
      include_docs:
        type: boolean
        description: Include the generated documentation in the docker image(s).
        default: false
  workflow_run:
    workflows:
      - Deployments
    types:
      - completed

name: Publishing
jobs:
  # Collects build info artifacts from the triggering deployment run, pushes
  # staging branches to kick off the external (GitLab) asset builds, waits for
  # the corresponding draft releases, downloads their assets, and re-uploads
  # everything as a single workflow artifact for the downstream jobs.
  assets:
    name: Assets
    # Run on manual dispatch, or when the triggering Deployments run succeeded.
    if: github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success'
    runs-on: ubuntu-latest
    permissions:
      contents: read
    environment:
      name: ghcr-deployment
      url: ${{ vars.deployment_url }}
    outputs:
      artifacts_url: ${{ steps.artifacts.outputs.artifacts_url }}
      retrieved_assets: ${{ steps.assets_retrieval.outputs.artifact_name }}
      github_commit: ${{ steps.artifacts.outputs.github_commit }}
      docker_images: ${{ steps.artifacts.outputs.docker_images }}
      python_wheels: ${{ steps.artifacts.outputs.python_wheels }}
      installers: ${{ steps.artifacts.outputs.installers }}
      releases: ${{ steps.artifacts.outputs.releases }}
      release_title: ${{ steps.artifacts.outputs.release_title }}
      release_version: ${{ steps.artifacts.outputs.release_version }}
      cudaq_version: ${{ steps.artifacts.outputs.cudaq_version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ vars.data_branch }}
          token: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}

      # Downloads the *_publishing / deployment_info artifacts of the
      # triggering run and builds up JSON descriptors for docker images,
      # python wheels, and installers.
      - name: Download build info
        id: artifacts
        run: |
          if ${{ inputs.deployment_id != '' }}; then
            artifacts_url=https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ inputs.deployment_id }}/artifacts
          else
            artifacts_url=${{ github.event.workflow_run.artifacts_url }}
            echo "Artifacts downloaded from https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}" >> $GITHUB_STEP_SUMMARY
          fi
          artifacts=$(gh api $artifacts_url -q '.artifacts[] | {name: .name, url: .archive_download_url}')
          # Download artifact $1 and concatenate all contained files into $2.txt.
          function download {
            gh api $1 > info.zip
            unzip -d info info.zip
            for file in `find info/ -type f`; do
              cat "$file" >> $2.txt
            done
            rm -rf info info.zip
          }
          docker_images="{\"info_files\":[]}"
          python_wheels="{\"info_files\":[]}"
          installers="{\"info_files\":[]}"
          for artifact in `echo "$artifacts"`; do
            name=`echo $artifact | jq -r '.name'`
            url=`echo $artifact | jq -r '.url'`
            if [ "${name%_publishing}" != "$name" ]; then
              download "$url" "$name"
              platforms=`cat "$name.txt" | egrep -o 'platforms?: \S*' | cut -d ' ' -f 2`
              image_hash=`cat "$name.txt" | grep -o -e "cuda-quantum-dev-image: \S*" -e "cuda-quantum-assets-image: \S*" -e "cuda-quantum-wheeldeps-image: \S*" | cut -d ' ' -f 2`
              if [ -n "$(echo $platforms | grep ',')" ]; then runner=linux-amd64-cpu8
              else runner=$(echo $platforms | tr / -)-cpu8
              fi
              # Make sure to push all staging commits to a separate branch
              # to ensure that a pipeline runs for each commit.
              staging_branch=bot/${{ inputs.assets_from_run || github.run_id }}/$name
              # Use a dedicated draft release for each set of assets to avoid issues
              # with race conditions when running artifacts builds concurrently.
              release_id=${{ inputs.assets_from_run || github.run_id }}_$name
              releases+="$release_id "
              info={\"$name\":{\"release_id\":\"$release_id\",\"staging_branch\":\"$staging_branch\",\"runner\":\"$runner\",\"artifacts_url\":\"$artifacts_url\",\"platform\":\"$platforms\",\"image_hash\":\"$image_hash\"}}
              if [ "${name#image}" != "$name" ]; then
                docker_images=`echo $docker_images | jq ".info_files |= . + [\"$name\"]"`
                docker_images=`echo $docker_images | jq ". |= . + $info"`
              elif [ "${name#python}" != "$name" ]; then
                python_wheels=`echo $python_wheels | jq ".info_files |= . + [\"$name\"]"`
                python_wheels=`echo $python_wheels | jq ". |= . + $info"`
              elif [ "${name#installer}" != "$name" ]; then
                installers=`echo $installers | jq ".info_files |= . + [\"$name\"]"`
                installers=`echo $installers | jq ". |= . + $info"`
              fi
            elif [ "${name#deployment_info}" != "$name" ]; then
              download "$url" "$name"
              github_commit=`cat "$name.txt" | grep -o 'source-sha: \S*' | cut -d ' ' -f 2`
              release_title=`cat "$name.txt" | grep -o 'release-title: \S*' | cut -d ' ' -f 2`
              release_version=`cat "$name.txt" | grep -o 'release-version: \S*' | cut -d ' ' -f 2`
            elif [ "$name" == "cuda_quantum_docs" ] && ${{ github.event_name == 'workflow_dispatch' && inputs.include_docs }}; then
              docs_archive="$(pwd)/cuda_quantum_docs.zip"
              gh api $url > "$docs_archive"
            fi
          done
          # Version number of the Python wheels
          if ${{ github.ref_type == 'tag' || startsWith(github.ref_name, 'releases/') }}; then
            cudaq_version=`echo ${{ github.ref_name }} | egrep -o "([0-9]{1,}\.)+[0-9]{1,}"`
          else
            cudaq_version=${release_version:-0.0.0}
          fi
          echo "release_title=$release_title" >> $GITHUB_OUTPUT
          echo "release_version=$release_version" >> $GITHUB_OUTPUT
          echo "releases='$releases'" >> $GITHUB_OUTPUT
          echo "github_commit=$github_commit" >> $GITHUB_OUTPUT
          echo "docker_images=$(echo $docker_images)" >> $GITHUB_OUTPUT
          echo "python_wheels=$(echo $python_wheels)" >> $GITHUB_OUTPUT
          echo "installers=$(echo $installers)" >> $GITHUB_OUTPUT
          echo "artifacts_url=$artifacts_url" >> $GITHUB_OUTPUT
          echo "cudaq_version=$cudaq_version" >> $GITHUB_OUTPUT
          echo "docs_archive=$docs_archive" >> $GITHUB_OUTPUT
        env:
          GH_TOKEN: ${{ github.token }}

      # Pushes one staging commit per info file; skipped when re-using assets
      # from a prior run (assets_from_run).
      - name: Trigger assets creation
        id: assets_creation
        if: inputs.assets_from_run == ''
        run: |
          # The commit title of the staging commit needs to match
          # the docker image that is used to build additional components.
          # Using the image sha as the file name and the docker image name
          # as the folder is convenient for the GitLab CI.
          git config --global user.name "cuda-quantum-bot"
          git config --global user.email "cuda-quantum-bot@users.noreply.github.com"
          current_branch=$(git rev-parse --abbrev-ref HEAD)
          function create_assets {
            release_id=`echo "$4" | jq -r ".$1.release_id"`
            staging_branch=`echo "$4" | jq -r ".$1.staging_branch"`
            artifacts_url=`echo "$4" | jq -r ".$1.artifacts_url"`
            staging_folder="deployments/staging/$3"
            image_hash=`cat "$1.txt" | grep -o "$2: \S*" | cut -d ' ' -f 2`
            file_id=`echo $image_hash | rev | cut -d ':' -f 1 | rev`
            if [ -n "$(cat "$1.txt" | tail -1)" ]; then echo >> "$1.txt"; fi
            if ${{ inputs.nvidia_mgpu_commit != '' }}; then
              sed -i '/nvidia-mgpu-commit/d' "$1.txt"
              echo "nvidia-mgpu-commit: ${{ inputs.nvidia_mgpu_commit }}" >> "$1.txt"
            fi
            echo "asset-name: $1.txt" >> "$1.txt"
            echo "release-id: $release_id" >> "$1.txt"
            echo "artifacts-url: $artifacts_url" >> "$1.txt"
            mkdir -p "$staging_folder" && mv -v "$1.txt" "$staging_folder/$file_id"
            echo "Pushing $1 to $staging_branch"
            git add "$staging_folder" && git commit -m "$image_hash"
            git pull origin -- $staging_branch 2> /dev/null || true
            git push origin $current_branch:$staging_branch
          }
          for file in ${{ join(fromJson(steps.artifacts.outputs.docker_images).info_files, ' ') }}; do
            create_assets $file cuda-quantum-dev-image cuda-quantum-dev '${{ steps.artifacts.outputs.docker_images }}'
          done
          for file in ${{ join(fromJson(steps.artifacts.outputs.python_wheels).info_files, ' ') }}; do
            create_assets $file cuda-quantum-wheeldeps-image cuda-quantum-devdeps '${{ steps.artifacts.outputs.python_wheels }}'
          done
          for file in ${{ join(fromJson(steps.artifacts.outputs.installers).info_files, ' ') }}; do
            create_assets $file cuda-quantum-assets-image cuda-quantum-assets '${{ steps.artifacts.outputs.installers }}'
          done

      # Polls until every expected draft release exists.
      # NOTE(review): `gh release list` shows a limited number of releases by
      # default; with many concurrent drafts a `-L` limit bump may be needed.
      - name: Wait for assets
        run: |
          for release_id in `echo ${{ steps.artifacts.outputs.releases }}`; do
            while [ -z "$(gh release list -R ${{ github.repository }} | grep -s $release_id)" ];
            do echo "Waiting for assets $release_id ..." && sleep 300;
            done
          done
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}

      # We can delete staging branch now after the expected draft releases have been created.
      - name: Clean up
        if: steps.assets_creation.outcome != 'skipped'
        run: |
          # Clean up the staging branch that was used to trigger the GitLab pipeline.
          git config --global user.name "cuda-quantum-bot"
          git config --global user.email "cuda-quantum-bot@users.noreply.github.com"
          function delete_staging_branch {
            staging_branch=`echo "$2" | jq -r ".$1.staging_branch"`
            echo "Delete staging branch $staging_branch ..."
            if [[ "$staging_branch" =~ ^bot\/.*$ ]]; then
              git push origin --delete $staging_branch
            else
              echo "::error::Unexpected staging branch."
              exit 1
            fi
          }
          for info_file in ${{ join(fromJson(steps.artifacts.outputs.docker_images).info_files, ' ') }}; do
            delete_staging_branch $info_file '${{ steps.artifacts.outputs.docker_images }}'
          done
          for info_file in ${{ join(fromJson(steps.artifacts.outputs.python_wheels).info_files, ' ') }}; do
            delete_staging_branch $info_file '${{ steps.artifacts.outputs.python_wheels }}'
          done
          for info_file in ${{ join(fromJson(steps.artifacts.outputs.installers).info_files, ' ') }}; do
            delete_staging_branch $info_file '${{ steps.artifacts.outputs.installers }}'
          done

      # Downloads all release assets into /tmp/assets, one folder per info file,
      # retrying until the expected info file appears among the assets.
      - name: Retrieve
        id: assets_retrieval
        run: |
          assets_folder=/tmp/assets && mkdir "$assets_folder" && cd "$assets_folder"
          echo "artifact_name=downstream_assets_${{ github.run_id }}" >> $GITHUB_OUTPUT
          if ${{ steps.artifacts.outputs.docs_archive != '' }}; then
            unzip -d "documentation" ${{ steps.artifacts.outputs.docs_archive }}
          fi
          function download_assets {
            mkdir "$1" && cd "$1"
            release_id=`echo "$2" | jq -r ".$1.release_id"`
            while ! [ -f "$1.txt" ]; do
              echo "Download $release_id assets..."
              (gh release download $release_id -R ${{ github.repository }}) || true
              sleep 30
            done
            cd ..
          }
          for file in ${{ join(fromJson(steps.artifacts.outputs.docker_images).info_files, ' ') }}; do
            download_assets $file '${{ steps.artifacts.outputs.docker_images }}'
          done
          for file in ${{ join(fromJson(steps.artifacts.outputs.python_wheels).info_files, ' ') }}; do
            download_assets $file '${{ steps.artifacts.outputs.python_wheels }}'
          done
          for file in ${{ join(fromJson(steps.artifacts.outputs.installers).info_files, ' ') }}; do
            download_assets $file '${{ steps.artifacts.outputs.installers }}'
          done
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}

      - name: Upload assets
        if: steps.artifacts.outputs.releases != ''
        uses: actions/upload-artifact@v4
        with:
          name: ${{ steps.assets_retrieval.outputs.artifact_name }}
          path: /tmp/assets
          retention-days: 1
          if-no-files-found: warn
cudaq_hpc: | |
name: CUDA Quantum Docker image | |
if: ${{ toJson(fromJson(needs.assets.outputs.docker_images).info_files) != '[]' }} | |
needs: assets | |
runs-on: ubuntu-latest | |
permissions: | |
contents: read | |
packages: write | |
id-token: write | |
environment: | |
name: ghcr-deployment | |
url: ${{ vars.deployment_url }} | |
strategy: | |
matrix: | |
info_file: ${{ fromJson(needs.assets.outputs.docker_images).info_files }} | |
fail-fast: false | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
- name: Log in to DockerHub | |
uses: docker/login-action@v3 | |
with: | |
username: ${{ secrets.DOCKERHUB_USERNAME }} | |
password: ${{ secrets.DOCKERHUB_READONLY_TOKEN }} | |
- name: Log in to GitHub CR | |
uses: docker/login-action@v3 | |
with: | |
registry: ghcr.io | |
username: ${{ github.actor }} | |
password: ${{ github.token }} | |
- name: Load docker assets | |
uses: actions/download-artifact@v4 | |
with: | |
name: ${{ needs.assets.outputs.retrieved_assets }} | |
path: /tmp/assets | |
- name: Retrieve assets | |
id: release_info | |
run: | | |
release_id=${{ fromJson(needs.assets.outputs.docker_images)[format('{0}', matrix.info_file)].release_id }} | |
assets_folder=assets && mkdir "$assets_folder" && cd "$assets_folder" | |
cp -r /tmp/assets/${{ matrix.info_file }}/. "$(pwd)" | |
if ${{ github.event_name == 'workflow_dispatch' && inputs.include_docs }}; then | |
mkdir documentation && mv -v /tmp/assets/documentation/* documentation | |
fi | |
build_info=${{ matrix.info_file }}* | |
platforms=`cat $build_info | grep -o 'platforms: \S*' | cut -d ' ' -f 2` | |
cudaqbase_image=`cat $build_info | grep -o 'cuda-quantum-image: \S*' | cut -d ' ' -f 2` | |
cudaqdev_image=`cat $build_info | grep -o 'cuda-quantum-dev-image: \S*' | cut -d ' ' -f 2` | |
cudaqdevdeps_image=`cat $build_info | grep -o 'cuda-quantum-devdeps-image: \S*' | cut -d ' ' -f 2` | |
for file in `ls *zip`; do unzip "$file" && rm "$file"; done && cd - | |
docker pull $cudaqbase_image | |
base_tag=`docker inspect $cudaqbase_image --format='{{json .Config.Labels}}' | jq -r '."org.opencontainers.image.version"'` | |
image_title=`docker inspect $cudaqbase_image --format='{{json .Config.Labels}}' | jq -r '."org.opencontainers.image.title"'` | |
image_description=`docker inspect $cudaqbase_image --format='{{json .Config.Labels}}' | jq -r '."org.opencontainers.image.description"'` | |
docker image rm $cudaqbase_image | |
docker image prune --force | |
registry=`echo $cudaqbase_image | rev | cut -d / -f2- | rev` | |
push_to_ngc=`([ "$registry" == "${registry#nvcr.io}" ] && echo false) || echo true` | |
echo "release_id=$release_id" >> $GITHUB_OUTPUT | |
echo "push_to_ngc=$push_to_ngc" >> $GITHUB_OUTPUT | |
echo "image_name=$registry/cuda-quantum" >> $GITHUB_OUTPUT | |
echo "image_tag=${base_tag%-base}" >> $GITHUB_OUTPUT | |
echo "image_title=$image_title" >> $GITHUB_OUTPUT | |
echo "image_description=$image_description" >> $GITHUB_OUTPUT | |
echo "platforms=$platforms" >> $GITHUB_OUTPUT | |
echo "cudaqbase_image=$cudaqbase_image" >> $GITHUB_OUTPUT | |
echo "cudaqdev_image=$cudaqdev_image" >> $GITHUB_OUTPUT | |
echo "cudaqdevdeps_image=$cudaqdevdeps_image" >> $GITHUB_OUTPUT | |
echo "assets_folder=$assets_folder" >> $GITHUB_OUTPUT | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Log in to default registry | |
if: steps.release_info.outputs.push_to_ngc != 'true' | |
uses: docker/login-action@v3 | |
with: | |
registry: ${{ vars.registry }} | |
username: ${{ github.actor }} | |
password: ${{ github.token }} | |
- name: Log in to NGC registry | |
if: steps.release_info.outputs.push_to_ngc == 'true' | |
uses: docker/login-action@v3 | |
with: | |
registry: 'nvcr.io' | |
username: '$oauthtoken' | |
password: ${{ secrets.NGC_CREDENTIALS }} | |
- name: Set up buildx runner | |
uses: docker/setup-buildx-action@v3 | |
- name: Extract cuda-quantum metadata | |
id: metadata | |
uses: docker/metadata-action@v5 | |
with: | |
images: ${{ steps.release_info.outputs.image_name }} | |
flavor: latest=false | |
tags: type=raw,value=${{ steps.release_info.outputs.image_tag }} | |
labels: | | |
org.opencontainers.image.title=${{ steps.release_info.outputs.image_title }} | |
org.opencontainers.image.description=${{ steps.release_info.outputs.image_description }} | |
- name: Build cuda-quantum image | |
id: cudaq_build | |
uses: docker/build-push-action@v5 | |
with: | |
context: . | |
file: ./docker/release/cudaq.ext.Dockerfile | |
build-args: | | |
base_image=${{ steps.release_info.outputs.cudaqbase_image }} | |
assets=${{ steps.release_info.outputs.assets_folder }} | |
vscode_config=docker/release/config/.vscode | |
tags: ${{ steps.metadata.outputs.tags }} | |
labels: ${{ steps.metadata.outputs.labels }} | |
platforms: ${{ steps.release_info.outputs.platforms }} | |
provenance: false | |
push: true | |
- name: Install Cosign | |
uses: sigstore/cosign-installer@v3.3.0 | |
with: | |
cosign-release: 'v2.2.2' | |
- name: Sign image with GitHub OIDC Token | |
if: steps.release_info.outputs.push_to_ngc != 'true' | |
env: | |
DIGEST: ${{ steps.cudaq_build.outputs.digest }} | |
TAGS: ${{ steps.metadata.outputs.tags }} | |
run: cosign sign --yes --recursive "${TAGS}@${DIGEST}" | |
- name: Install NGC CLI | |
if: steps.release_info.outputs.push_to_ngc == 'true' | |
uses: ./.github/actions/install-ngc-cli | |
with: | |
version: 3.31.0 | |
checksum: b715e503e2c0b44814a51f330eafd605f5d240ea0987bf615700d359c993f138 | |
- name: Sign image with NGC CLI | |
if: steps.release_info.outputs.push_to_ngc == 'true' | |
env: | |
TAGS: ${{ steps.metadata.outputs.tags }} | |
NGC_CLI_API_KEY: ${{ secrets.NGC_CREDENTIALS }} | |
NGC_CLI_ORG: ${{ github.repository_owner }} | |
NGC_CLI_TEAM: 'nightly' | |
run: | | |
echo "Signing ${TAGS}" | |
ngc-cli/ngc registry image publish --source ${TAGS} ${TAGS} --sign | |
- name: Update release information | |
run: | | |
release_id=${{ steps.release_info.outputs.release_id }} | |
gh release view $release_id --json body --jq .body > rel_notes.txt | |
header_length=`cat rel_notes.txt | grep -n "Release notes generated" | cut -d ':' -f 1` | |
head -n $(($header_length - 1)) rel_notes.txt > new_notes.txt | |
echo -e "\nImages for ${{ steps.release_info.outputs.platforms }}:\n" >> new_notes.txt | |
echo "- cuda-quantum (base image): ${{ steps.release_info.outputs.cudaqbase_image }}" >> new_notes.txt | |
echo "- cuda-quantum (with hpc components): ${{ steps.release_info.outputs.image_name }}@${{ steps.cudaq_build.outputs.digest }}" >> new_notes.txt | |
echo "- cuda-quantum-dev (for extension development): ${{ steps.release_info.outputs.cudaqdev_image }}" >> new_notes.txt | |
echo "- cuda-quantum-devdeps (development dependencies only): ${{ steps.release_info.outputs.cudaqdevdeps_image }}" >> new_notes.txt | |
(echo && tail -n +$header_length rel_notes.txt) >> new_notes.txt | |
gh release edit $release_id -R ${{ github.repository }} --notes-file new_notes.txt | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Configure validation | |
uses: cloudposse/github-action-matrix-outputs-write@1.0.0 | |
with: | |
matrix-step-name: docker_images | |
matrix-key: ${{ matrix.info_file }} | |
outputs: | | |
image_hash: ${{ steps.release_info.outputs.image_name }}@${{ steps.cudaq_build.outputs.digest }} | |
cudaq_installer: | |
name: CUDA Quantum installer | |
if: ${{ toJson(fromJson(needs.assets.outputs.installers).info_files) != '[]' }} | |
needs: assets | |
permissions: | |
contents: read | |
packages: write | |
id-token: write | |
environment: | |
name: ghcr-deployment | |
url: ${{ vars.deployment_url }} | |
strategy: | |
matrix: | |
info_file: ${{ fromJson(needs.assets.outputs.installers).info_files }} | |
fail-fast: false | |
runs-on: ${{ (contains(matrix.info_file, 'arm') && 'linux-arm64-cpu8') || 'linux-amd64-cpu8' }} | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
- name: Load build assets | |
uses: actions/download-artifact@v4 | |
with: | |
name: ${{ needs.assets.outputs.retrieved_assets }} | |
path: /tmp/assets | |
- name: Retrieve assets | |
id: release_info | |
run: | | |
release_id=${{ fromJson(needs.assets.outputs.installers)[format('{0}', matrix.info_file)].release_id }} | |
assets_folder=assets && mkdir "$assets_folder" && cd "$assets_folder" | |
cp -r /tmp/assets/${{ matrix.info_file }}/. "$(pwd)" | |
build_info=${{ matrix.info_file }}* | |
platform=`cat $build_info | grep -o 'platform: \S*' | cut -d ' ' -f 2` | |
platform_id=`echo $platform | sed 's/linux\///g' | tr -d ' '` | |
platform_arch=`([ "$platform_id" == "amd64" ] && echo x86_64) || ([ "$platform_id" == "arm64" ] && echo aarch64) || echo any` | |
assets_image=`cat $build_info | grep -o 'cuda-quantum-assets-image: \S*' | cut -d ' ' -f 2` | |
platform_base_image=`cat $build_info | grep -o 'platform-base-image: \S*' | cut -d ' ' -f 2` | |
openmpi_buildcache=`cat $build_info | grep -o 'openmpi-build-cache: \S*' | cut -d ' ' -f 2` | |
for file in `ls *zip`; do unzip "$file" && rm "$file"; done && cd - | |
echo "release_id=$release_id" >> $GITHUB_OUTPUT | |
echo "platform=$platform" >> $GITHUB_OUTPUT | |
echo "platform_arch=$platform_arch" >> $GITHUB_OUTPUT | |
echo "assets_image=$assets_image" >> $GITHUB_OUTPUT | |
echo "platform_base_image=$platform_base_image" >> $GITHUB_OUTPUT | |
echo "openmpi_buildcache=$openmpi_buildcache" >> $GITHUB_OUTPUT | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Log in to DockerHub | |
uses: docker/login-action@v3 | |
with: | |
username: ${{ secrets.DOCKERHUB_USERNAME }} | |
password: ${{ secrets.DOCKERHUB_READONLY_TOKEN }} | |
- name: Log in to default registry | |
if: steps.release_info.outputs.push_to_ngc != 'true' | |
uses: docker/login-action@v3 | |
with: | |
registry: ${{ vars.registry }} | |
username: ${{ github.actor }} | |
password: ${{ github.token }} | |
- name: Set up buildx runner | |
uses: docker/setup-buildx-action@v3 | |
- name: Build installer | |
uses: docker/build-push-action@v5 | |
with: | |
context: . | |
file: ./docker/release/installer.Dockerfile | |
build-args: | | |
base_image=${{ steps.release_info.outputs.assets_image }} | |
additional_components=assets | |
platforms: ${{ steps.release_info.outputs.platform }} | |
outputs: type=local,dest=/tmp/install | |
- name: Update release information | |
run: | | |
release_id=${{ steps.release_info.outputs.release_id }} | |
gh release view $release_id --json body --jq .body > rel_notes.txt | |
header_length=`cat rel_notes.txt | grep -n "Release notes generated" | cut -d ':' -f 1` | |
head -n $(($header_length - 1)) rel_notes.txt > new_notes.txt | |
echo -e "\nImage to create ${{ steps.release_info.outputs.platform }} installer:\n" >> new_notes.txt | |
echo "- cuda-quantum-assets: ${{ steps.release_info.outputs.assets_image }}" >> new_notes.txt | |
(echo && tail -n +$header_length rel_notes.txt) >> new_notes.txt | |
gh release edit $release_id -R ${{ github.repository }} --notes-file new_notes.txt | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Upload installer | |
uses: actions/upload-artifact@v4 | |
with: | |
name: ${{ steps.release_info.outputs.platform_arch }}-installer | |
path: /tmp/install | |
retention-days: 1 | |
if-no-files-found: error | |
cudaq_wheels: | |
name: CUDA Quantum Python wheels | |
if: ${{ toJson(fromJson(needs.assets.outputs.python_wheels).info_files) != '[]' }} | |
needs: assets | |
permissions: | |
contents: read | |
environment: | |
name: ghcr-deployment | |
url: ${{ vars.deployment_url }} | |
strategy: | |
matrix: | |
info_file: ${{ fromJson(needs.assets.outputs.python_wheels).info_files }} | |
python_version: ['3.8', '3.9', '3.10', '3.11'] | |
fail-fast: false | |
runs-on: ${{ (contains(matrix.info_file, 'arm') && 'linux-arm64-cpu8') || 'linux-amd64-cpu8' }} | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
- name: Load wheel assets | |
uses: actions/download-artifact@v4 | |
with: | |
name: ${{ needs.assets.outputs.retrieved_assets }} | |
path: /tmp/assets | |
- name: Retrieve assets | |
id: release_info | |
run: | | |
release_id=${{ fromJson(needs.assets.outputs.python_wheels)[format('{0}', matrix.info_file)].release_id }} | |
assets_folder=$(pwd)/assets && mkdir "$assets_folder" | |
cd /tmp/assets/${{ matrix.info_file }} | |
build_info=${{ matrix.info_file }}* | |
platform=`cat $build_info | grep -o 'platform: \S*' | cut -d ' ' -f 2` | |
platform_id=`echo $platform | sed 's/linux\///g' | tr -d ' '` | |
platform_arch=`([ "$platform_id" == "amd64" ] && echo x86_64) || ([ "$platform_id" == "arm64" ] && echo aarch64) || echo any` | |
cudaqwheeldeps_image=`cat $build_info | grep -o 'cuda-quantum-wheeldeps-image: \S*' | cut -d ' ' -f 2` | |
for file in `ls *zip`; do unzip -j "$file" -d "$assets_folder" && rm "$file"; done | |
echo "release_id=$release_id" >> $GITHUB_OUTPUT | |
echo "platform=$platform" >> $GITHUB_OUTPUT | |
echo "platform_arch=$platform_arch" >> $GITHUB_OUTPUT | |
echo "cudaqwheeldeps_image=$cudaqwheeldeps_image" >> $GITHUB_OUTPUT | |
echo "assets_folder=$assets_folder" >> $GITHUB_OUTPUT | |
echo "docker_output=type=local,dest=/tmp/wheels" >> $GITHUB_OUTPUT | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
- name: Log in to DockerHub | |
uses: docker/login-action@v3 | |
with: | |
username: ${{ secrets.DOCKERHUB_USERNAME }} | |
password: ${{ secrets.DOCKERHUB_READONLY_TOKEN }} | |
- name: Set up context for buildx | |
run: | | |
docker context create builder_context | |
- name: Set up buildx runner | |
uses: docker/setup-buildx-action@v3 | |
with: | |
endpoint: builder_context | |
driver-opts: network=host | |
- name: Build cuda-quantum wheel | |
id: build_wheel | |
uses: docker/build-push-action@v5 | |
with: | |
context: . | |
file: ./docker/release/cudaq.wheel.Dockerfile | |
build-args: | | |
base_image=${{ steps.release_info.outputs.cudaqwheeldeps_image }} | |
release_version=${{ needs.assets.outputs.cudaq_version }} | |
python_version=${{ matrix.python_version }} | |
outputs: ${{ steps.release_info.outputs.docker_output }} | |
- name: Set retention days | |
id: set_retention_days | |
run: | | |
# Save the x86_64-py3.10-wheels for longer because our some of our | |
# nightly jobs rely on them being present, even if we haven't done a | |
# fresh publishing in the last 24 hours. | |
if [ "${{ steps.release_info.outputs.platform_arch }}" == "x86_64" ] && [ "${{ matrix.python_version }}" == "3.10" ]; then | |
echo "retention_days=7" >> $GITHUB_OUTPUT | |
else | |
echo "retention_days=1" >> $GITHUB_OUTPUT | |
fi | |
- name: Upload wheels | |
uses: actions/upload-artifact@v4 | |
with: | |
name: ${{ steps.release_info.outputs.platform_arch }}-py${{ matrix.python_version }}-wheels | |
path: /tmp/wheels | |
retention-days: ${{ steps.set_retention_days.outputs.retention_days }} | |
if-no-files-found: error | |
cudaq_wheels_release_info: | |
name: Update release info of CUDA Quantum Python wheels | |
needs: [assets, cudaq_wheels] | |
runs-on: ubuntu-latest | |
strategy: | |
matrix: | |
info_file: ${{ fromJson(needs.assets.outputs.python_wheels).info_files }} | |
fail-fast: false | |
steps: | |
- name: Update release information | |
run: | | |
release_id=${{ fromJson(needs.assets.outputs.python_wheels)[format('{0}', matrix.info_file)].release_id }} | |
platform=${{ fromJson(needs.assets.outputs.python_wheels)[format('{0}', matrix.info_file)].platform }} | |
cudaqwheeldeps_image=${{ fromJson(needs.assets.outputs.python_wheels)[format('{0}', matrix.info_file)].image_hash }} | |
gh release view $release_id -R ${{ github.repository }} --json body --jq .body > rel_notes.txt | |
header_length=`cat rel_notes.txt | grep -n "Release notes generated" | cut -d ':' -f 1` | |
head -n $(($header_length - 1)) rel_notes.txt > new_notes.txt | |
echo -e "\nImage to create $platform wheels:\n" >> new_notes.txt | |
echo "- cuda-quantum-devdeps (development dependencies only): $cudaqwheeldeps_image" >> new_notes.txt | |
(echo && tail -n +$header_length rel_notes.txt) >> new_notes.txt | |
gh release edit $release_id -R ${{ github.repository }} --notes-file new_notes.txt | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
config: | |
name: Configure validation | |
needs: [cudaq_hpc, cudaq_installer, cudaq_wheels] | |
runs-on: ubuntu-latest | |
outputs: | |
docker_images: ${{ steps.validation.outputs.docker_images }} | |
steps: | |
- name: Get matrix job output | |
id: read_json | |
uses: cloudposse/github-action-matrix-outputs-read@1.0.0 | |
with: | |
matrix-step-name: docker_images | |
- name: Prepare validation | |
id: validation | |
run: | | |
images=`echo '${{ steps.read_json.outputs.result }}' | jq '[.image_hash[] | select(. != "")]'` | |
echo "docker_images=$(echo $images)" >> $GITHUB_OUTPUT | |
installer_validation: | |
name: Installer validation | |
needs: [assets, cudaq_installer] | |
runs-on: linux-amd64-gpu-v100-latest-1 | |
strategy: | |
matrix: | |
os_image: ['redhat/ubi8:8.0', 'ubuntu:22.04'] | |
fail-fast: false | |
container: | |
image: ${{ matrix.os_image }} | |
options: --user root | |
env: | |
NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }} | |
TERM: xterm | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
- name: Load installer | |
uses: actions/download-artifact@v4 | |
with: | |
name: x86_64-installer | |
path: /tmp/install | |
- name: Runtime dependencies (apt) | |
if: startsWith(matrix.os_image, 'ubuntu') | |
run: | | |
apt-get update && apt-get install -y --no-install-recommends \ | |
wget ca-certificates libc6-dev libopenmpi-dev | |
distro=`echo ${{ matrix.os_image }} | tr -d . | tr -d :` | |
CUDA_DOWNLOAD_URL=https://developer.download.nvidia.com/compute/cuda/repos | |
wget "${CUDA_DOWNLOAD_URL}/$distro/x86_64/cuda-keyring_1.1-1_all.deb" | |
dpkg -i cuda-keyring_1.1-1_all.deb | |
apt-get update && apt-get install -y --no-install-recommends \ | |
libcublas-11-8 cuda-cudart-11-8 libcusolver-11-8 | |
- name: Runtime dependencies (dnf) | |
if: startsWith(matrix.os_image, 'redhat') | |
run: | | |
dnf install -y --nobest --setopt=install_weak_deps=False \ | |
'dnf-command(config-manager)' glibc-devel openssh-clients | |
. scripts/configure_build.sh install-cudart | |
# We need to install an MPI implementation, otherwise nothing | |
# will be able to run on the nvidia-mgpu backend. | |
# The easiest is to just install it via conda. | |
dnf install -y --nobest --setopt=install_weak_deps=False wget | |
mkdir -p ~/.miniconda3 | |
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-$(uname -m).sh -O ~/.miniconda3/miniconda.sh | |
bash ~/.miniconda3/miniconda.sh -b -u -p ~/.miniconda3 | |
rm -rf ~/.miniconda3/miniconda.sh | |
eval "$(~/.miniconda3/bin/conda shell.bash hook)" | |
conda install -y -c conda-forge openmpi | |
echo 'eval "$(~/.miniconda3/bin/conda shell.bash hook)"' >> ~/.bashrc | |
echo 'export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$CONDA_PREFIX/lib"' >> ~/.bashrc | |
echo "export OMPI_MCA_opal_cuda_support=true OMPI_MCA_btl='^openib'" >> ~/.bashrc | |
      # Installs the self-extracting installer and validates the C++ examples
      # inside the resulting environment.
      - name: Install and run sanity checks
        shell: bash
        env:
          # BASH_ENV makes every bash invocation in this step source ~/.bashrc,
          # which the dnf step above populated with the conda/MPI setup.
          BASH_ENV: ~/.bashrc
        run: |
          chmod +x /tmp/install/install_cuda_quantum.*
          /tmp/install/install_cuda_quantum.* --accept
          # Validate only the C++ examples here; the python subfolder is
          # removed (the Python wheels are validated by separate jobs).
          rm -rf examples && mv docs/sphinx/examples examples && rm -rf examples/python
          GITHUB_STEP_SUMMARY=$GITHUB_STEP_SUMMARY \
          bash -l scripts/validate_container.sh | tee /tmp/validation.out
          # Check that the tests included the nvidia-mgpu backend:
          # take everything from the "Testing backends:" line down to the
          # first blank line as the list of backends that were exercised.
          relevant_line=`grep -n "Testing backends:" /tmp/validation.out | cut -d : -f1`
          tested_backends=`cat /tmp/validation.out | tail -n +$relevant_line | sed -e '/^$/,$d'`
          if [ -z "$(echo $tested_backends | grep nvidia-mgpu)" ]; then
            echo "::error::Missing tests for nvidia-mgpu backend."
            exit 1
          fi
  # Validates the published Python wheel instructions: installs a built
  # x86_64 wheel into a bare Ubuntu container on a V100 runner and drives
  # scripts/validate_wheel.sh against the Python examples, snippets and tests.
  wheel_validation:
    name: Python wheel instructions validation
    needs: [assets, cudaq_wheels]
    runs-on: linux-amd64-gpu-v100-latest-1
    container:
      image: ubuntu:22.04
      options: --user root
      env:
        # Forward the runner's GPU visibility setting into the container.
        NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }}
        TERM: xterm
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }}
      - name: Load wheel
        uses: actions/download-artifact@v4
        with:
          name: x86_64-py3.10-wheels
          path: /tmp/install
      - name: Run simple validation
        shell: bash
        run: |
          apt-get update && apt-get install -y --no-install-recommends ca-certificates vim wget openssh-client
          cd /tmp
          # Setup links for validate_wheel.sh script
          ln -s $GITHUB_WORKSPACE/scripts/validate_wheel.sh .
          ln -s $GITHUB_WORKSPACE/docs/sphinx/examples/python /tmp/examples
          ln -s $GITHUB_WORKSPACE/docs/sphinx/snippets/python /tmp/snippets
          ln -s $GITHUB_WORKSPACE/python/tests /tmp/tests
          ln -s $GITHUB_WORKSPACE/python/README.md .
          # Source the validation script so that the status_sum variable it
          # sets is visible below. (NOTE(review): an earlier comment here
          # mentioned a -q flag, but none is passed.)
          set +e # Allow script to keep going through errors (needed for skipped tests)
          source validate_wheel.sh -w /tmp/install/cuda_quantum-*.whl -f /tmp -p 3.10
          set -e # Re-enable exit code error checking
          # status_sum is populated by the sourced validate_wheel.sh script.
          if [ "$status_sum" -ne "0" ]; then
            echo "::error::Error running validation script"
            exit $status_sum
          fi
wheel_validation_x86_simple: | |
name: Simple Python wheel installation validation | |
needs: [assets, cudaq_wheels] | |
runs-on: linux-amd64-gpu-v100-latest-1 | |
container: | |
image: ubuntu:22.04 | |
options: --user root | |
env: | |
NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }} | |
TERM: xterm | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
- name: Load wheel | |
uses: actions/download-artifact@v4 | |
with: | |
name: x86_64-py3.10-wheels | |
path: /tmp/install | |
- name: Run simple x86 validation | |
shell: bash | |
run: | | |
# These simple steps are only expected to work for x86 and only for | |
# targets and test cases that don't require MPI. | |
# Create clean python3 environment. | |
apt-get update && apt-get install -y --no-install-recommends python3 python3-pip | |
pip install /tmp/install/cuda_quantum-*.whl | |
status_sum=0 | |
set +e # Allow script to keep going through errors | |
# Verify that the necessary GPU targets are installed and usable | |
# Note nvidia-mgpu requires MPI, so it is not available with this method. | |
for tgt in nvidia nvidia-fp64 tensornet; do | |
echo "Running with target ${tgt}" | |
python3 docs/sphinx/examples/python/intro.py --target ${tgt} | |
if [ $? -ne 0 ]; then | |
echo -e "\e[01;31mPython trivial test for target ${tgt} failed.\e[0m" >&2 | |
status_sum=$((status_sum+1)) | |
fi | |
done | |
set -e # Re-enable exit code error checking | |
if [ "$status_sum" -ne "0" ]; then | |
echo "::error::Error running validation script" | |
exit $status_sum | |
fi | |
image_validation: | |
name: Docker image validation | |
needs: [assets, config] | |
runs-on: linux-amd64-gpu-v100-latest-1 | |
permissions: | |
contents: read | |
packages: read | |
strategy: | |
matrix: | |
image_hash: ${{ fromJson(needs.config.outputs.docker_images) }} | |
fail-fast: false | |
container: | |
image: ${{ matrix.image_hash }} | |
options: --user root # otherwise step summary doesn't work | |
env: | |
NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }} | |
TERM: xterm | |
steps: | |
- name: Checkout repository | |
uses: actions/checkout@v4 | |
with: | |
ref: ${{ inputs.github_commit || needs.assets.outputs.github_commit }} | |
path: github-repo | |
sparse-checkout: | | |
scripts | |
docs | |
- name: Basic validation (GPU backends) | |
shell: bash | |
run: | | |
backends_to_test=`\ | |
for file in $(ls $CUDA_QUANTUM_PATH/targets/*.yml); \ | |
do | |
if [ -n "$(cat $file | grep "gpu-requirements")" ]; then \ | |
basename $file | cut -d "." -f 1; \ | |
elif [ -n "$(basename $file | grep mqpu)" ]; then \ | |
echo remote-mqpu; \ | |
fi; \ | |
done` | |
rm -rf examples && mv github-repo/docs/sphinx/examples examples | |
mv github-repo/docs/notebook_validation.py . | |
GITHUB_STEP_SUMMARY=$GITHUB_STEP_SUMMARY \ | |
bash github-repo/scripts/validate_container.sh $backends_to_test | tee /tmp/validation.out | |
# Check that the tests included the nvidia-mgpu backend: | |
relevant_line=`grep -n "Testing backends:" /tmp/validation.out | cut -d : -f1` | |
tested_backends=`cat /tmp/validation.out | tail -n +$relevant_line | sed -e '/^$/,$d'` | |
if [ -z "$(echo $tested_backends | grep nvidia-mgpu)" ]; then | |
echo "::error::Missing tests for nvidia-mgpu backend." | |
exit 1 | |
fi | |
- name: MPI validation | |
shell: bash | |
run: | | |
status_sum=0 && set +e # Allow script to keep going through errors | |
for ex in `find /home/cudaq/examples/other/distributed/ -name '*.cpp'`; do | |
# Set CUDAQ_ENABLE_MPI_EXAMPLE to activate these examples. | |
nvq++ -DCUDAQ_ENABLE_MPI_EXAMPLE=1 $ex | |
status=$? | |
if [ $status -eq 0 ]; then | |
# Run with mpiexec | |
mpiexec --allow-run-as-root -np 4 ./a.out | |
status=$? | |
filename=$(basename -- "$ex") | |
if [ $status -eq 0 ]; then | |
echo ":white_check_mark: Successfully ran $filename." >> $GITHUB_STEP_SUMMARY | |
else | |
echo ":x: Failed to execute $filename." >> $GITHUB_STEP_SUMMARY | |
status_sum=$((status_sum+1)) | |
fi | |
else | |
echo ":x: Compilation failed for $filename." >> $GITHUB_STEP_SUMMARY | |
status_sum=$((status_sum+1)) | |
fi | |
done | |
set -e # Re-enable exit code error checking | |
if [ ! $status_sum -eq 0 ]; then | |
echo "::error::$status_sum examples failed; see step summary for a list of failures." | |
exit $status_sum | |
fi | |
create_release: | |
name: CUDA Quantum Release | |
needs: [assets, cudaq_hpc, cudaq_installer, cudaq_wheels] | |
if: needs.assets.outputs.release_title && inputs.github_commit == '' && inputs.assets_from_run == '' && inputs.nvidia_mgpu_commit == '' | |
runs-on: ubuntu-latest | |
environment: | |
name: ghcr-deployment | |
url: ${{ vars.deployment_url }} | |
steps: | |
- name: Download cuda-quantum wheels | |
uses: actions/download-artifact@v4 | |
with: | |
pattern: '*-wheels' | |
path: wheelhouse | |
- name: Download CUDA Quantum installer | |
uses: actions/download-artifact@v4 | |
with: | |
pattern: '*-installer' | |
path: installers | |
# The python wheels are uploaded as a release asset, but not pushed to anywhere else. | |
# Note that PyPI packages cannot be updated once pushed; | |
# - We could upload wheels to test-pypi when creating a release. | |
# - The push to pypi itself should be done manually. | |
# See also: | |
# https://github.com/pypa/gh-action-pypi-publish | |
# https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ | |
- name: Create release | |
run: | | |
for installer in `find installers/ -type f -not -name '*.whl'`; do | |
mv -v "$installer" "$(basename -- "$installer")" | |
done | |
for dir in `ls wheelhouse/`; do | |
mv -v "wheelhouse/$dir"/* wheelhouse/ && rmdir "wheelhouse/$dir" | |
done | |
zip -r wheelhouse.zip wheelhouse | |
release_id=${{ inputs.assets_from_run || github.run_id }} | |
release_title="${{ needs.assets.outputs.release_title }}" | |
github_commit=${{ needs.assets.outputs.github_commit }} | |
version=${{ needs.assets.outputs.release_version }} | |
versions=`gh release list -R ${{ github.repository }} --exclude-drafts --exclude-pre-releases | egrep -o "([0-9]{1,}\.)+[0-9]{1,}\S*" | sort -r -V` | |
latest_tag=`echo $versions | cut -d ' ' -f 1` | |
prerelease=`([ "$(echo $version | egrep -o '([0-9]{1,}\.)+[0-9]{1,}')" == "$version" ] && echo '') || echo '--prerelease'` | |
rel_notes="Release created by workflow [${{ github.run_id }}](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})." | |
rel_notes+=$(echo "<br/>GitHub commit [$github_commit](https://github.com/${{ github.repository }}/tree/$github_commit)") | |
gh release create $release_id --title $release_id -R ${{ github.repository }} \ | |
--target $github_commit --draft $prerelease \ | |
--generate-notes --notes-start-tag $latest_tag --notes "$rel_notes" | |
gh release upload $release_id -R ${{ github.repository }} wheelhouse.zip --clobber | |
gh release upload $release_id -R ${{ github.repository }} install_cuda_quantum.* --clobber | |
gh release edit $release_id -R ${{ github.repository }} \ | |
--title "$release_title" --tag $version $prerelease # --draft=false | |
env: | |
GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }} | |
  # Deletes the transient draft releases and artifacts that earlier jobs used
  # to exchange data, once all publishing jobs have finished successfully.
  clean_up:
    name: Clean up
    needs: [assets, cudaq_hpc, cudaq_installer, cudaq_wheels, wheel_validation, wheel_validation_x86_simple, create_release, installer_validation, image_validation]
    # Force this job to run even when some of the dependencies above are skipped.
    if: always() && !cancelled() && needs.assets.result != 'skipped' && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled')
    runs-on: ubuntu-latest
    environment:
      name: ghcr-deployment
      url: ${{ vars.deployment_url }}
    steps:
      - name: Clean up draft releases
        id: cleanup
        run: |
          # Delete the draft release(s) used to exchange data between GitLab and GitHub.
          for release_id in `echo ${{ needs.assets.outputs.releases }}`; do
            if [ -n "$(gh release list -R ${{ github.repository }} | grep -s $release_id)" ]; then
              # Preserve the draft's release notes in the step summary, up to
              # (but excluding) the auto-generated "Release notes generated"
              # header, before deleting the release.
              echo "## Draft Release $release_id:" >> $GITHUB_STEP_SUMMARY
              gh release view $release_id -R ${{ github.repository }} --json body --jq .body > rel_notes.txt
              # NOTE(review): if the header line is missing, header_length is
              # empty and `head -n $((-1))` drops only the last line instead -
              # confirm drafts always contain the generated-notes header.
              header_length=`cat rel_notes.txt | grep -n "Release notes generated" | cut -d ':' -f 1`
              head -n $(($header_length - 1)) rel_notes.txt >> $GITHUB_STEP_SUMMARY
              gh release delete $release_id -R ${{ github.repository }} -y
              echo "Deleted release $release_id."
            fi
          done
        env:
          GH_TOKEN: ${{ secrets.REPO_BOT_ACCESS_TOKEN }}
      - name: Delete artifacts
        uses: actions/github-script@v7
        with:
          script: |
            // Delete the artifact that the assets job uploaded to pass the
            // retrieved assets between jobs; it is no longer needed once all
            // publishing jobs have completed.
            const res = await github.rest.actions.listArtifactsForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
            })
            res.data.artifacts
              .filter(({ name }) => name === '${{ needs.assets.outputs.retrieved_assets }}')
              .forEach(({ id }) => {
                github.rest.actions.deleteArtifact({
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  artifact_id: id,
                })
              })