From 193a0b66660201358bf238ff0270c01b52266239 Mon Sep 17 00:00:00 2001 From: Humair Khan Date: Fri, 19 Jan 2024 17:52:12 -0500 Subject: [PATCH] UPSTREAM: : transfer actions from dsp tekton. Signed-off-by: Humair Khan --- .../workflows/add-issues-to-dsp-project.yml | 14 + .../workflows/add-issues-to-odh-project.yml | 14 + .github/workflows/build-images.yaml | 108 +++++++ .github/workflows/build-master.yml | 77 +++++ .github/workflows/build-prs-trigger.yaml | 33 +++ .github/workflows/build-prs.yml | 267 ++++++++++++++++++ .github/workflows/tag-release-quay.yml | 86 ++++++ 7 files changed, 599 insertions(+) create mode 100644 .github/workflows/add-issues-to-dsp-project.yml create mode 100644 .github/workflows/add-issues-to-odh-project.yml create mode 100644 .github/workflows/build-images.yaml create mode 100644 .github/workflows/build-master.yml create mode 100644 .github/workflows/build-prs-trigger.yaml create mode 100644 .github/workflows/build-prs.yml create mode 100644 .github/workflows/tag-release-quay.yml diff --git a/.github/workflows/add-issues-to-dsp-project.yml b/.github/workflows/add-issues-to-dsp-project.yml new file mode 100644 index 000000000000..11730ab11ad9 --- /dev/null +++ b/.github/workflows/add-issues-to-dsp-project.yml @@ -0,0 +1,14 @@ +name: Add Issues to ODH Data Science Pipelines Planning Project +on: + issues: + types: + - opened +jobs: + add-to-project: + name: Add issue to project + runs-on: ubuntu-latest + steps: + - uses: actions/add-to-project@v0.5.0 + with: + project-url: https://github.com/orgs/opendatahub-io/projects/43 + github-token: ${{ secrets.GH_TOKEN_PROJECT_EDIT }} diff --git a/.github/workflows/add-issues-to-odh-project.yml b/.github/workflows/add-issues-to-odh-project.yml new file mode 100644 index 000000000000..e84b61e38ebf --- /dev/null +++ b/.github/workflows/add-issues-to-odh-project.yml @@ -0,0 +1,14 @@ +name: Add Issues to ODH Feature Tracking Project +on: + issues: + types: + - opened +jobs: + add-to-project: + name: Add issue to project + runs-on: ubuntu-latest + steps: + - uses: actions/add-to-project@v0.5.0 + with: + project-url: https://github.com/orgs/opendatahub-io/projects/40 + github-token: ${{ secrets.GH_TOKEN_PROJECT_EDIT }} diff --git a/.github/workflows/build-images.yaml b/.github/workflows/build-images.yaml new file mode 100644 index 000000000000..200212f85c77 --- /dev/null +++ b/.github/workflows/build-images.yaml @@ -0,0 +1,108 @@ +name: Build images from sources. +run-name: Build images from sources. +on: + workflow_call: + inputs: + src_branch: + type: string + default: 'v1.0.x' + description: 'Source branch to build DSP from' + required: true + target_tag: + type: string + default: 'vx.y.z' + description: 'Target Image Tag' + required: true + quay_org: + type: string + default: 'opendatahub' + description: 'Quay Organization' + required: true + overwrite_imgs: + type: string + default: 'true' + description: 'Overwrite images in quay if they already exist for this release.' 
+ required: true + fail_fast: + type: string + default: 'true' + description: 'Stop running entire Workflow if a single build fails' + required: true + + workflow_dispatch: + inputs: + src_branch: + type: string + default: 'v1.0.x' + description: 'Source branch to build DSP from' + required: true + target_tag: + type: string + default: 'vx.y.z' + description: 'Target Image Tag' + required: true + quay_org: + type: string + default: 'opendatahub' + description: 'Quay Organization' + required: true + overwrite_imgs: + type: string + default: 'true' + description: 'Overwrite images in quay if they already exist for this release.' + required: true + fail_fast: + type: string + default: 'true' + description: 'Stop running entire Workflow if a single build fails' + required: true +env: + SOURCE_BRANCH: ${{ inputs.src_branch }} + QUAY_ORG: ${{ inputs.quay_org }} + QUAY_ID: ${{ secrets.QUAY_ROBOT_USERNAME }} + QUAY_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }} + TARGET_IMAGE_TAG: ${{ inputs.target_tag }} + OVERWRITE_IMAGES: ${{ inputs.overwrite_imgs }} +jobs: + build-images-with-tag: + continue-on-error: false + runs-on: ubuntu-latest + permissions: + contents: read + strategy: + fail-fast: ${{ inputs.fail_fast == 'true' }} + matrix: + include: + - image: ds-pipelines-api-server + dockerfile: backend/Dockerfile + - image: ds-pipelines-frontend + dockerfile: frontend/Dockerfile + - image: ds-pipelines-cacheserver + dockerfile: backend/Dockerfile.cacheserver + - image: ds-pipelines-persistenceagent + dockerfile: backend/Dockerfile.persistenceagent + - image: ds-pipelines-scheduledworkflow + dockerfile: backend/Dockerfile.scheduledworkflow + - image: ds-pipelines-viewercontroller + dockerfile: backend/Dockerfile.viewercontroller + - image: ds-pipelines-artifact-manager + dockerfile: backend/artifact_manager/Dockerfile + - image: ds-pipelines-metadata-writer + dockerfile: backend/metadata_writer/Dockerfile + - image: ds-pipelines-metadata-grpc + dockerfile: third_party/ml-metadata/Dockerfile + - image: ds-pipelines-metadata-envoy + dockerfile: third_party/metadata_envoy/Dockerfile + - image: ds-pipelines-driver + dockerfile: backend/Dockerfile.driver + - image: ds-pipelines-launcher + dockerfile: backend/Dockerfile.launcher + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/build + name: Build Image + with: + IMAGE_REPO: ${{ matrix.image }} + DOCKERFILE: ${{ matrix.dockerfile }} + GH_REPO: ${{ github.repository }} + OVERWRITE: ${{ env.OVERWRITE_IMAGES }} diff --git a/.github/workflows/build-master.yml b/.github/workflows/build-master.yml new file mode 100644 index 000000000000..0c2e21a1bf0b --- /dev/null +++ b/.github/workflows/build-master.yml @@ -0,0 +1,77 @@ +name: Build images for Master branch +on: + push: + branches: + - master +concurrency: + group: ${{ github.workflow }} + cancel-in-progress: true +env: + QUAY_ORG: opendatahub + QUAY_ID: ${{ secrets.QUAY_ROBOT_USERNAME }} + QUAY_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }} + SOURCE_BRANCH: master +jobs: + build-master-images: + continue-on-error: false + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: + - image: ds-pipelines-api-server + dockerfile: backend/Dockerfile + - image: ds-pipelines-frontend + dockerfile: frontend/Dockerfile + - image: ds-pipelines-cacheserver + dockerfile: backend/Dockerfile.cacheserver + - image: ds-pipelines-persistenceagent + dockerfile: backend/Dockerfile.persistenceagent + - image: ds-pipelines-scheduledworkflow + dockerfile: backend/Dockerfile.scheduledworkflow + - image: 
ds-pipelines-viewercontroller + dockerfile: backend/Dockerfile.viewercontroller + - image: ds-pipelines-artifact-manager + dockerfile: backend/artifact_manager/Dockerfile + - image: ds-pipelines-metadata-writer + dockerfile: backend/metadata_writer/Dockerfile + - image: ds-pipelines-metadata-grpc + dockerfile: third_party/ml-metadata/Dockerfile + - image: ds-pipelines-metadata-envoy + dockerfile: third_party/metadata_envoy/Dockerfile + - image: ds-pipelines-driver + dockerfile: backend/Dockerfile.driver + - image: ds-pipelines-launcher + dockerfile: backend/Dockerfile.launcher + steps: + - uses: actions/checkout@v3 + - name: Generate Tag + shell: bash + id: tags + env: + SOURCE_BRANCH: ${{ env.SOURCE_BRANCH }} + run: | + commit_sha=${{ github.event.after }} + tag=${SOURCE_BRANCH}-${commit_sha:0:7} + echo "tag=${tag}" >> $GITHUB_OUTPUT + - name: Build Image + uses: ./.github/actions/build + env: + IMG: quay.io/${{ env.QUAY_ORG }}/${{ matrix.image }}:${{ steps.tags.outputs.tag }} + TARGET_IMAGE_TAG: ${{ steps.tags.outputs.tag }} + with: + OVERWRITE: true + IMAGE_REPO: ${{ matrix.image }} + DOCKERFILE: ${{ matrix.dockerfile }} + GH_REPO: ${{ github.repository }} + - name: Tag latest + shell: bash + env: + IMG: quay.io/${{ env.QUAY_ORG }}/${{ matrix.image }} + NEWEST_TAG: ${{ steps.tags.outputs.tag }} + SOURCE_BRANCH: ${{ env.SOURCE_BRANCH }} + run: | + podman tag ${IMG}:${NEWEST_TAG} ${IMG}:latest + podman push ${IMG}:latest + podman tag ${IMG}:${NEWEST_TAG} ${IMG}:${SOURCE_BRANCH} + podman push ${IMG}:${SOURCE_BRANCH} diff --git a/.github/workflows/build-prs-trigger.yaml b/.github/workflows/build-prs-trigger.yaml new file mode 100644 index 000000000000..58337dc70e06 --- /dev/null +++ b/.github/workflows/build-prs-trigger.yaml @@ -0,0 +1,33 @@ +name: Trigger build images for PRs +on: + pull_request: + paths-ignore: + - 'docs/**' + - 'guides/**' + - 'images/**' + - '**/README.md' + types: + - opened + - reopened + - closed + - synchronize +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true +jobs: + upload-data: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Save PR payload + shell: bash + run: | + mkdir -p ./pr + echo ${{ github.event.pull_request.number }} >> ./pr/pr_number + echo ${{ github.event.pull_request.state }} >> ./pr/pr_state + echo ${{ github.event.pull_request.head.sha }} >> ./pr/head_sha + echo ${{ github.event.action }} >> ./pr/event_action + - uses: actions/upload-artifact@v2 + with: + name: pr + path: pr/ diff --git a/.github/workflows/build-prs.yml b/.github/workflows/build-prs.yml new file mode 100644 index 000000000000..6e04fb2f84ff --- /dev/null +++ b/.github/workflows/build-prs.yml @@ -0,0 +1,267 @@ +name: Build images for PRs +on: + workflow_run: + workflows: ["Trigger build images for PRs"] + types: + - completed +env: + QUAY_ORG: opendatahub + QUAY_ID: ${{ secrets.QUAY_ROBOT_USERNAME }} + QUAY_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }} + GH_USER_EMAIL: 140449482+dsp-developers@users.noreply.github.com + GH_USER_NAME: dsp-developers +jobs: + fetch-data: + name: Fetch workflow payload + runs-on: ubuntu-latest + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + outputs: + pr_state: ${{ steps.vars.outputs.pr_state }} + pr_number: ${{ steps.vars.outputs.pr_number }} + head_sha: ${{ steps.vars.outputs.head_sha }} + event_action: ${{ steps.vars.outputs.event_action }} + steps: + - name: 'Download artifact' + uses: 
actions/github-script@v3.1.0 + with: + script: | + var artifacts = await github.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifact = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "pr" + })[0]; + var download = await github.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); + - run: unzip pr.zip + - shell: bash + id: vars + run: | + pr_number=$(cat ./pr_number) + pr_state=$(cat ./pr_state) + head_sha=$(cat ./head_sha) + event_action=$(cat ./event_action) + echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT + echo "pr_state=${pr_state}" >> $GITHUB_OUTPUT + echo "head_sha=${head_sha}" >> $GITHUB_OUTPUT + echo "event_action=${event_action}" >> $GITHUB_OUTPUT + + build-pr-images: + name: Build DSP Images + if: needs.fetch-data.outputs.pr_state == 'open' + runs-on: ubuntu-latest + needs: fetch-data + env: + SOURCE_BRANCH: ${{ needs.fetch-data.outputs.head_sha }} + TARGET_IMAGE_TAG: pr-${{ needs.fetch-data.outputs.pr_number }} + strategy: + fail-fast: false + matrix: + include: + - image: ds-pipelines-api-server + dockerfile: backend/Dockerfile + - image: ds-pipelines-frontend + dockerfile: frontend/Dockerfile + - image: ds-pipelines-cacheserver + dockerfile: backend/Dockerfile.cacheserver + - image: ds-pipelines-persistenceagent + dockerfile: backend/Dockerfile.persistenceagent + - image: ds-pipelines-scheduledworkflow + dockerfile: backend/Dockerfile.scheduledworkflow + - image: ds-pipelines-viewercontroller + dockerfile: backend/Dockerfile.viewercontroller + - image: ds-pipelines-artifact-manager + dockerfile: backend/artifact_manager/Dockerfile + - image: ds-pipelines-metadata-writer + dockerfile: backend/metadata_writer/Dockerfile + - image: ds-pipelines-metadata-grpc + dockerfile: third_party/ml-metadata/Dockerfile + - image: ds-pipelines-metadata-envoy + dockerfile: third_party/metadata_envoy/Dockerfile + - image: ds-pipelines-driver + dockerfile: backend/Dockerfile.driver + - image: ds-pipelines-launcher + dockerfile: backend/Dockerfile.launcher + steps: + - uses: actions/checkout@v3 + - name: Build Image + uses: ./.github/actions/build + with: + OVERWRITE: true + IMAGE_REPO: ${{ matrix.image }} + DOCKERFILE: ${{ matrix.dockerfile }} + GH_REPO: ${{ github.repository }} + + comment-on-pr: + name: Comment on PR after images built + runs-on: ubuntu-latest + needs: [fetch-data, build-pr-images] + concurrency: + group: ${{ github.workflow }}-comment-on-pr-${{ needs.fetch-data.outputs.pr_number }} + cancel-in-progress: true + env: + SOURCE_BRANCH: ${{ needs.fetch-data.outputs.head_sha }} + TARGET_IMAGE_TAG: pr-${{ needs.fetch-data.outputs.pr_number }} + steps: + - uses: actions/checkout@v3 + - name: Echo PR metadata + shell: bash + env: + GH_TOKEN: ${{ secrets.GH_TOKEN_PROJECT_EDIT }} + run: | + echo ${{ needs.fetch-data.outputs.head_sha }} + echo ${{ needs.fetch-data.outputs.pr_number }} + echo ${{ needs.fetch-data.outputs.pr_state }} + echo ${{ needs.fetch-data.outputs.event_action }} + - name: Send comment + shell: bash + env: + GH_TOKEN: ${{ secrets.GH_TOKEN_PROJECT_EDIT }} + FULLIMG_API_SERVER: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-api-server:${{ env.TARGET_IMAGE_TAG }} + FULLIMG_FRONTEND: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-frontend:${{ 
env.TARGET_IMAGE_TAG }}
+          FULLIMG_CACHESERVER: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-cacheserver:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_PERSISTENCEAGENT: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-persistenceagent:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_SCHEDULEDWORKFLOW: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-scheduledworkflow:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_VIEWERCONTROLLER: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-viewercontroller:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_ARTIFACT_MANAGER: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-artifact-manager:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_METADATA_WRITER: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-metadata-writer:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_METADATA_ENVOY: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-metadata-envoy:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_METADATA_GRPC: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-metadata-grpc:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_DRIVER: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-driver:${{ env.TARGET_IMAGE_TAG }}
+          FULLIMG_LAUNCHER: quay.io/${{ env.QUAY_ORG }}/ds-pipelines-launcher:${{ env.TARGET_IMAGE_TAG }}
+        run: |
+          git config user.email "${{ env.GH_USER_EMAIL }}"
+          git config user.name "${{ env.GH_USER_NAME }}"
+
+          action=${{ needs.fetch-data.outputs.event_action }}
+
+          if [[ "$action" == "synchronize" ]]; then
+            echo "Change to PR detected. A new PR build was completed." >> /tmp/body-file.txt
+          fi
+
+          if [[ "$action" == "reopened" ]]; then
+            echo "PR was re-opened." >> /tmp/body-file.txt
+          fi
+
+          cat <<"EOF" >> /tmp/body-file.txt
+          A new set of images has been built to help with testing out this PR:
+          **API Server**: `${{ env.FULLIMG_API_SERVER }}`
+          **DSP DRIVER**: `${{ env.FULLIMG_DRIVER }}`
+          **DSP LAUNCHER**: `${{ env.FULLIMG_LAUNCHER }}`
+          **Persistence Agent**: `${{ env.FULLIMG_PERSISTENCEAGENT }}`
+          **Scheduled Workflow Manager**: `${{ env.FULLIMG_SCHEDULEDWORKFLOW }}`
+          **CRD Viewer Controller**: `${{ env.FULLIMG_VIEWERCONTROLLER }}`
+          **Artifact Manager**: `${{ env.FULLIMG_ARTIFACT_MANAGER }}`
+          **MLMD Server**: `${{ env.FULLIMG_METADATA_GRPC }}`
+          **MLMD Writer**: `${{ env.FULLIMG_METADATA_WRITER }}`
+          **MLMD Envoy Proxy**: `${{ env.FULLIMG_METADATA_ENVOY }}`
+          **Cache Server**: `${{ env.FULLIMG_CACHESERVER }}`
+          **UI**: `${{ env.FULLIMG_FRONTEND }}`
+          EOF
+
+          gh pr comment ${{ needs.fetch-data.outputs.pr_number }} --body-file /tmp/body-file.txt
+
+          if [[ "$action" == "opened" || "$action" == "reopened" ]]; then
+          cat <<"EOF" >> /tmp/additional-comment.txt
+          An OCP cluster where you are logged in as cluster admin is required.
+
+          The Data Science Pipelines team recommends testing this using the Data Science Pipelines Operator. Check [here](https://github.com/opendatahub-io/data-science-pipelines-operator) for more information on using the DSPO.
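+
+          Note: these images are cleaned up from quay.io automatically when this PR is closed, so they are only available for testing while the PR remains open.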
+
+          To use and deploy a DSP stack with these images (assuming the DSPO is deployed), first save the following YAML to a file named `dspa.pr-${{ needs.fetch-data.outputs.pr_number }}.yaml`:
+          ```yaml
+          apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+          kind: DataSciencePipelinesApplication
+          metadata:
+            name: pr-${{ needs.fetch-data.outputs.pr_number }}
+          spec:
+            apiServer:
+              image: "${{ env.FULLIMG_API_SERVER }}"
+              artifactImage: "${{ env.FULLIMG_ARTIFACT_MANAGER }}"
+              argoDriverImage: "${{ env.FULLIMG_DRIVER }}"
+              argoLauncherImage: "${{ env.FULLIMG_LAUNCHER }}"
+            persistenceAgent:
+              image: "${{ env.FULLIMG_PERSISTENCEAGENT }}"
+            scheduledWorkflow:
+              image: "${{ env.FULLIMG_SCHEDULEDWORKFLOW }}"
+            crdViewer:
+              deploy: true  # Optional component
+              image: "${{ env.FULLIMG_VIEWERCONTROLLER }}"
+            mlmd:
+              deploy: true  # Optional component
+              grpc:
+                image: "${{ env.FULLIMG_METADATA_GRPC }}"
+              envoy:
+                image: "${{ env.FULLIMG_METADATA_ENVOY }}"
+              writer:
+                image: "${{ env.FULLIMG_METADATA_WRITER }}"
+            mlpipelineUI:
+              deploy: true  # Optional component
+              image: "${{ env.FULLIMG_FRONTEND }}"
+            objectStorage:
+              minio:
+                deploy: true
+                image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance'
+          ```
+
+          Then run the following:
+
+          ```bash
+          cd $(mktemp -d)
+          git clone git@github.com:opendatahub-io/data-science-pipelines.git
+          cd data-science-pipelines/
+          git fetch origin pull/${{ needs.fetch-data.outputs.pr_number }}/head
+          git checkout -b pullrequest ${{ env.SOURCE_BRANCH }}
+          # apply the DSPA manifest saved earlier (adjust the path to wherever you saved it)
+          oc apply -f /path/to/dspa.pr-${{ needs.fetch-data.outputs.pr_number }}.yaml
+          ```
+
+          See [here](https://github.com/opendatahub-io/data-science-pipelines-operator#deploy-dsp-instance) for more instructions on how to deploy and test a Data Science Pipelines Application.
+
+          EOF
+          gh pr comment ${{ needs.fetch-data.outputs.pr_number }} --body-file /tmp/additional-comment.txt
+          fi
+  clean-pr-images:
+    name: Cleanup images if PR is closed
+    if: needs.fetch-data.outputs.pr_state == 'closed'
+    runs-on: ubuntu-latest
+    needs: fetch-data
+    env:
+      TARGET_IMAGE_TAG: pr-${{ needs.fetch-data.outputs.pr_number }}
+    strategy:
+      fail-fast: false
+      matrix:
+        image:
+          - ds-pipelines-api-server
+          - ds-pipelines-frontend
+          - ds-pipelines-cacheserver
+          - ds-pipelines-persistenceagent
+          - ds-pipelines-scheduledworkflow
+          - ds-pipelines-viewercontroller
+          - ds-pipelines-artifact-manager
+          - ds-pipelines-launcher
+          - ds-pipelines-driver
+          - ds-pipelines-metadata-writer
+          - ds-pipelines-metadata-grpc
+          - ds-pipelines-metadata-envoy
+    steps:
+      - name: Delete PR image
+        shell: bash
+        run: |
+          tag=$(curl --request GET 'https://quay.io/api/v1/repository/${{ env.QUAY_ORG }}/${{ matrix.image }}/tag/?specificTag=${{ env.TARGET_IMAGE_TAG }}')
+          exists=$(echo ${tag} | yq .tags - | yq any)
+          IMAGE=quay.io/${{ env.QUAY_ORG }}/${{ matrix.image }}:${{ env.TARGET_IMAGE_TAG }}
+          if [[ "$exists" == "true" ]]; then
+            echo "PR closed, deleting image ${{ matrix.image }}..."
+            skopeo delete --creds ${{ env.QUAY_ID }}:${{ env.QUAY_TOKEN }} docker://${IMAGE}
+          else
+            echo "Deletion of image ${IMAGE} skipped because the image does not exist."
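+            # the tag may never have been pushed (e.g. the build failed) or may
+            # already have been removed by an earlier run of this job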
+          fi
diff --git a/.github/workflows/tag-release-quay.yml b/.github/workflows/tag-release-quay.yml
new file mode 100644
index 000000000000..5fbdc5f8d0fa
--- /dev/null
+++ b/.github/workflows/tag-release-quay.yml
@@ -0,0 +1,86 @@
+name: Image push per GitHub Tag
+
+# This workflow is dispatched manually after a new tag is created on the repo
+# under the opendatahub-io org. It copies the images built for the associated
+# commit hash to quay.io under the new tag name.
+
+run-name: Creating new tag in quay.io based on pushed tag in GitHub.
+on:
+  workflow_dispatch:
+    inputs:
+      target_tag:
+        default: 'vx.y.z'
+        description: 'DSP Tag'
+        required: true
+env:
+  IMAGE_SERVER: quay.io/opendatahub/ds-pipelines-api-server
+  IMAGE_UI: quay.io/opendatahub/ds-pipelines-frontend
+  IMAGE_CACHE: quay.io/opendatahub/ds-pipelines-cacheserver
+  IMAGE_PA: quay.io/opendatahub/ds-pipelines-persistenceagent
+  IMAGE_SWF: quay.io/opendatahub/ds-pipelines-scheduledworkflow
+  IMAGE_VC: quay.io/opendatahub/ds-pipelines-viewercontroller
+  IMAGE_ARTIFACT: quay.io/opendatahub/ds-pipelines-artifact-manager
+  IMAGE_MLMD_WRITER: quay.io/opendatahub/ds-pipelines-metadata-writer
+  IMAGE_MLMD_ENVOY: quay.io/opendatahub/ds-pipelines-metadata-envoy
+  IMAGE_MLMD_GRPC: quay.io/opendatahub/ds-pipelines-metadata-grpc
+  IMAGE_LAUNCHER: quay.io/opendatahub/ds-pipelines-launcher
+  IMAGE_DRIVER: quay.io/opendatahub/ds-pipelines-driver
+jobs:
+  copy-tag-to-quay:
+    runs-on: ubuntu-latest
+    if: github.repository == 'opendatahub-io/data-science-pipelines'
+    steps:
+      - name: Git checkout
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: '0'
+      - name: Install skopeo
+        shell: bash
+        run: |
+          sudo apt-get -y update
+          sudo apt-get -y install skopeo
+      - name: Login to quay.io
+        shell: bash
+        env:
+          QUAY_ROBOT_USERNAME: ${{ secrets.QUAY_ROBOT_USERNAME }}
+          QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
+        run: |
+          skopeo login quay.io -u ${QUAY_ROBOT_USERNAME} -p ${QUAY_ROBOT_TOKEN}
+      - name: Get latest tag hash
+        id: hash
+        run: echo "hash=$(git rev-parse --short=7 ${{ inputs.target_tag }})" >> ${GITHUB_OUTPUT}
+      - name: Create new tag
+        shell: bash
+        env:
+          TAG: ${{ inputs.target_tag }}
+          HASH: ${{ steps.hash.outputs.hash }}
+        run: |
+          # Per-commit images are tagged "master-<short-sha>" by the
+          # "Build images for Master branch" workflow.
+          skopeo copy docker://${IMAGE_SERVER}:master-${{ env.HASH }} docker://${IMAGE_SERVER}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_UI}:master-${{ env.HASH }} docker://${IMAGE_UI}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_CACHE}:master-${{ env.HASH }} docker://${IMAGE_CACHE}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_PA}:master-${{ env.HASH }} docker://${IMAGE_PA}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_SWF}:master-${{ env.HASH }} docker://${IMAGE_SWF}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_VC}:master-${{ env.HASH }} docker://${IMAGE_VC}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_ARTIFACT}:master-${{ env.HASH }} docker://${IMAGE_ARTIFACT}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_MLMD_WRITER}:master-${{ env.HASH }} docker://${IMAGE_MLMD_WRITER}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_MLMD_ENVOY}:master-${{ env.HASH }} docker://${IMAGE_MLMD_ENVOY}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_MLMD_GRPC}:master-${{ env.HASH }} docker://${IMAGE_MLMD_GRPC}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_LAUNCHER}:master-${{ env.HASH }} docker://${IMAGE_LAUNCHER}:${{ env.TAG }}
+          skopeo copy docker://${IMAGE_DRIVER}:master-${{ env.HASH }} docker://${IMAGE_DRIVER}:${{ env.TAG }}
+      - name: Create latest tag
+        shell: bash
+        env:
+          HASH: ${{ steps.hash.outputs.hash }}
+        run: |
+          skopeo copy docker://${IMAGE_SERVER}:master-${{ env.HASH }} docker://${IMAGE_SERVER}:latest
+          skopeo copy docker://${IMAGE_UI}:master-${{ env.HASH }} docker://${IMAGE_UI}:latest
+          skopeo copy docker://${IMAGE_CACHE}:master-${{ env.HASH }} docker://${IMAGE_CACHE}:latest
+          skopeo copy docker://${IMAGE_PA}:master-${{ env.HASH }} docker://${IMAGE_PA}:latest
+          skopeo copy docker://${IMAGE_SWF}:master-${{ env.HASH }} docker://${IMAGE_SWF}:latest
+          skopeo copy docker://${IMAGE_VC}:master-${{ env.HASH }} docker://${IMAGE_VC}:latest
+          skopeo copy docker://${IMAGE_ARTIFACT}:master-${{ env.HASH }} docker://${IMAGE_ARTIFACT}:latest
+          skopeo copy docker://${IMAGE_MLMD_WRITER}:master-${{ env.HASH }} docker://${IMAGE_MLMD_WRITER}:latest
+          skopeo copy docker://${IMAGE_MLMD_ENVOY}:master-${{ env.HASH }} docker://${IMAGE_MLMD_ENVOY}:latest
+          skopeo copy docker://${IMAGE_MLMD_GRPC}:master-${{ env.HASH }} docker://${IMAGE_MLMD_GRPC}:latest
+          skopeo copy docker://${IMAGE_LAUNCHER}:master-${{ env.HASH }} docker://${IMAGE_LAUNCHER}:latest
+          skopeo copy docker://${IMAGE_DRIVER}:master-${{ env.HASH }} docker://${IMAGE_DRIVER}:latest
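+          # Optional sanity check: confirm the copied tags resolve, e.g.
+          #   skopeo inspect docker://${IMAGE_SERVER}:latest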