Add script for component image release (#571)
* add script

* update script

* fix

* Update .release.cloudbuild.yaml

* comments

* update
IronPan authored Dec 20, 2018
1 parent ca22c29 commit 379d4d7
Showing 10 changed files with 96 additions and 296 deletions.
264 changes: 0 additions & 264 deletions .release.cloudbuild.yaml
@@ -116,229 +116,6 @@ steps:
id: 'copyPythonSDKToLatest'
waitFor: ['copyPythonSDKLocal']

# Pull and retag the images for the Dataflow-based pipeline components
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tf-predict:$COMMIT_SHA']
id: 'pullPredict'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tf-predict:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:$TAG_NAME']
id: 'tagPredictVersionNumber'
waitFor: ['pullPredict']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tf-predict:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:$COMMIT_SHA']
id: 'tagPredictCommitSHA'
waitFor: ['pullPredict']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tfdv:$COMMIT_SHA']
id: 'pullTFDV'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tfdv:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:$TAG_NAME']
id: 'tagTFDVVersionNumber'
waitFor: ['pullTFDV']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tfdv:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:$COMMIT_SHA']
id: 'tagTFDVCommitSHA'
waitFor: ['pullTFDV']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tft:$COMMIT_SHA']
id: 'pullTFT'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tft:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:$TAG_NAME']
id: 'tagTFTVersionNumber'
waitFor: ['pullTFT']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tft:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:$COMMIT_SHA']
id: 'tagTFTCommitSHA'
waitFor: ['pullTFT']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tfma:$COMMIT_SHA']
id: 'pullTFMA'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tfma:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:$TAG_NAME']
id: 'tagTFMAVersionNumber'
waitFor: ['pullTFMA']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataflow-tfma:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:$COMMIT_SHA']
id: 'tagTFMACommitSHA'
waitFor: ['pullTFMA']

# Pull and retag the images for the Kubeflow-based pipeline components
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-deployer:$COMMIT_SHA']
id: 'pullDeployer'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:$TAG_NAME']
id: 'tagDeployerVersionNumber'
waitFor: ['pullDeployer']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:$COMMIT_SHA']
id: 'tagDeployerCommitSHA'
waitFor: ['pullDeployer']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA']
id: 'pullTrainer'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:$TAG_NAME']
id: 'tagTrainerVersionNumber'
waitFor: ['pullTrainer']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA']
id: 'tagTrainerCommitSHA'
waitFor: ['pullTrainer']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer-gpu:$COMMIT_SHA']
id: 'pullGpuTrainer'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer-gpu:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:$TAG_NAME']
id: 'tagGpuTrainerVersionNumber'
waitFor: ['pullGpuTrainer']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer-gpu:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:$COMMIT_SHA']
id: 'tagGpuTrainerCommitSHA'
waitFor: ['pullGpuTrainer']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf:$COMMIT_SHA']
id: 'pullLauncher'
- name: 'gcr.io/cloud-builders/docker'
entrypoint: '/bin/bash'
args: ['-c', 'printf "FROM gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf:$COMMIT_SHA\nENV TRAINER_IMAGE_NAME gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA" > Dockerfile; docker build -t ml-pipeline-kubeflow-tf .']
id: 'buildLauncher'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'ml-pipeline-kubeflow-tf', 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:$TAG_NAME']
id: 'tagLauncherVersionNumber'
waitFor: ['buildLauncher']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'ml-pipeline-kubeflow-tf', 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:$COMMIT_SHA']
id: 'tagLauncherCommitSHA'
waitFor: ['buildLauncher']

# Pull and retag the images for the Dataproc-based pipeline components
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-analyze:$COMMIT_SHA']
id: 'pullDataprocAnalyze'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-analyze:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:$TAG_NAME']
id: 'tagDataprocAnalyzeVersionNumber'
waitFor: ['pullDataprocAnalyze']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-analyze:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:$COMMIT_SHA']
id: 'tagDataprocAnalyzeCommitSHA'
waitFor: ['pullDataprocAnalyze']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-create-cluster:$COMMIT_SHA']
id: 'pullDataprocCreateCluster'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-create-cluster:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:$TAG_NAME']
id: 'tagDataprocCreateClusterVersionNumber'
waitFor: ['pullDataprocCreateCluster']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-create-cluster:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:$COMMIT_SHA']
id: 'tagDataprocCreateClusterCommitSHA'
waitFor: ['pullDataprocCreateCluster']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-delete-cluster:$COMMIT_SHA']
id: 'pullDataprocDeleteCluster'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-delete-cluster:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:$TAG_NAME']
id: 'tagDataprocDeleteClusterVersionNumber'
waitFor: ['pullDataprocDeleteCluster']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-delete-cluster:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:$COMMIT_SHA']
id: 'tagDataprocDeleteClusterCommitSHA'
waitFor: ['pullDataprocDeleteCluster']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-predict:$COMMIT_SHA']
id: 'pullDataprocPredict'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-predict:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:$TAG_NAME']
id: 'tagDataprocPredictVersionNumber'
waitFor: ['pullDataprocPredict']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-predict:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:$COMMIT_SHA']
id: 'tagDataprocPredictCommitSHA'
waitFor: ['pullDataprocPredict']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-transform:$COMMIT_SHA']
id: 'pullDataprocTransform'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-transform:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:$TAG_NAME']
id: 'tagDataprocTransformVersionNumber'
waitFor: ['pullDataprocTransform']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-transform:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:$COMMIT_SHA']
id: 'tagDataprocTransformCommitSHA'
waitFor: ['pullDataprocTransform']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-train:$COMMIT_SHA']
id: 'pullDataprocTrain'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-train:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-train:$TAG_NAME']
id: 'tagDataprocTrainVersionNumber'
waitFor: ['pullDataprocTrain']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-train:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-dataproc-train:$COMMIT_SHA']
id: 'tagDataprocTrainCommitSHA'
waitFor: ['pullDataprocTrain']

# Pull and retag the images for the ResNet CMLE sample pipeline components
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/resnet-deploy:$COMMIT_SHA']
id: 'pullResNetDeploy'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/resnet-deploy:$COMMIT_SHA', 'gcr.io/ml-pipeline/resnet-deploy:$TAG_NAME']
id: 'tagResNetDeployVersionNumber'
waitFor: ['pullResNetDeploy']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/resnet-deploy:$COMMIT_SHA', 'gcr.io/ml-pipeline/resnet-deploy:$COMMIT_SHA']
id: 'tagResNetDeployCommitSHA'
waitFor: ['pullResNetDeploy']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/resnet-preprocess:$COMMIT_SHA']
id: 'pullResNetPreprocess'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/resnet-preprocess:$COMMIT_SHA', 'gcr.io/ml-pipeline/resnet-preprocess:$TAG_NAME']
id: 'tagResNetPreprocessVersionNumber'
waitFor: ['pullResNetPreprocess']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/resnet-preprocess:$COMMIT_SHA', 'gcr.io/ml-pipeline/resnet-preprocess:$COMMIT_SHA']
id: 'tagResNetPreprocessCommitSHA'
waitFor: ['pullResNetPreprocess']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/resnet-train:$COMMIT_SHA']
id: 'pullResNetTrain'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/resnet-train:$COMMIT_SHA', 'gcr.io/ml-pipeline/resnet-train:$TAG_NAME']
id: 'tagResNetTrainVersionNumber'
waitFor: ['pullResNetTrain']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/resnet-train:$COMMIT_SHA', 'gcr.io/ml-pipeline/resnet-train:$COMMIT_SHA']
id: 'tagResNetTrainCommitSHA'
waitFor: ['pullResNetTrain']

# Pull and retag the images for the local pipeline components
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-local-confusion-matrix:$COMMIT_SHA']
id: 'pullLocalConfusionMatrix'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-local-confusion-matrix:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:$TAG_NAME']
id: 'tagLocalConfusionMatrixVersionNumber'
waitFor: ['pullLocalConfusionMatrix']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-local-confusion-matrix:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:$COMMIT_SHA']
id: 'tagLocalConfusionMatrixCommitSHA'
waitFor: ['pullLocalConfusionMatrix']
- name: 'gcr.io/cloud-builders/docker'
args: ['pull', 'gcr.io/$PROJECT_ID/ml-pipeline-local-roc:$COMMIT_SHA']
id: 'pullLocalROC'
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-local-roc:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-local-roc:$TAG_NAME']
id: 'tagLocalROCVersionNumber'
waitFor: ['pullLocalROC']
- name: 'gcr.io/cloud-builders/docker'
args: ['tag', 'gcr.io/$PROJECT_ID/ml-pipeline-local-roc:$COMMIT_SHA', 'gcr.io/ml-pipeline/ml-pipeline-local-roc:$COMMIT_SHA']
id: 'tagLocalROCCommitSHA'
waitFor: ['pullLocalROC']

# Build the tagged samples
- name: 'debian'
entrypoint: '/bin/bash'
@@ -366,45 +143,4 @@ images:
- 'gcr.io/ml-pipeline/bootstrapper:$TAG_NAME'
- 'gcr.io/ml-pipeline/bootstrapper:$COMMIT_SHA'

- 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:$COMMIT_SHA'

- 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:$COMMIT_SHA'

- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-train:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-dataproc-train:$COMMIT_SHA'

- 'gcr.io/ml-pipeline/resnet-deploy:$TAG_NAME'
- 'gcr.io/ml-pipeline/resnet-deploy:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/resnet-preprocess:$TAG_NAME'
- 'gcr.io/ml-pipeline/resnet-preprocess:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/resnet-train:$TAG_NAME'
- 'gcr.io/ml-pipeline/resnet-train:$COMMIT_SHA'

- 'gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:$COMMIT_SHA'
- 'gcr.io/ml-pipeline/ml-pipeline-local-roc:$TAG_NAME'
- 'gcr.io/ml-pipeline/ml-pipeline-local-roc:$COMMIT_SHA'

timeout: '1200s'
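
Each of the deleted pull-and-retag blocks above follows the same pattern; below is a minimal sketch of what one block does for a single component image (tf-predict shown), assuming the Cloud Build substitutions $PROJECT_ID, $COMMIT_SHA and $TAG_NAME used in the steps above:

# Sketch of one removed pull-and-retag block, expressed as plain docker commands.
# PROJECT_ID, COMMIT_SHA and TAG_NAME stand in for the Cloud Build substitutions above.
docker pull "gcr.io/${PROJECT_ID}/ml-pipeline-dataflow-tf-predict:${COMMIT_SHA}"
docker tag "gcr.io/${PROJECT_ID}/ml-pipeline-dataflow-tf-predict:${COMMIT_SHA}" "gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:${TAG_NAME}"
docker tag "gcr.io/${PROJECT_ID}/ml-pipeline-dataflow-tf-predict:${COMMIT_SHA}" "gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:${COMMIT_SHA}"
# The retagged images were then pushed by Cloud Build through the deleted entries in the images: list.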
2 changes: 1 addition & 1 deletion bootstrapper.yaml
@@ -65,7 +65,7 @@ spec:
spec:
containers:
- name: deploy
image: gcr.io/ml-pipeline/bootstrapper:0.1.4 #TODO-release: update the release tag for the next release
image: gcr.io/ml-pipeline/bootstrapper:0.1.4
imagePullPolicy: 'Always'
# Additional parameter available:
args: [
2 changes: 1 addition & 1 deletion components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
@@ -17,7 +17,7 @@
def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'):
return dsl.ContainerOp(
name = step_name,
image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:0.1.4',#TODO-release: update the release tag for the next release
image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:85c6413a2e13da4b8f198aeac1abc2f3a74fe789',
arguments = [
'--workers', number_of_workers,
'--pss', number_of_parameter_servers,
6 changes: 3 additions & 3 deletions components/kubeflow/launcher/src/train.template.yaml
@@ -26,7 +26,7 @@ spec:
spec:
containers:
- name: tensorflow
image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.4 #TODO-release: update the release tag for the next release
image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:85c6413a2e13da4b8f198aeac1abc2f3a74fe789
command:
- python
- -m
@@ -38,7 +38,7 @@ spec:
spec:
containers:
- name: tensorflow
image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.4 #TODO-release: update the release tag for the next release
image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:85c6413a2e13da4b8f198aeac1abc2f3a74fe789
command:
- python
- -m
@@ -50,7 +50,7 @@ spec:
spec:
containers:
- name: tensorflow
image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.4 #TODO-release: update the release tag for the next release
image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:85c6413a2e13da4b8f198aeac1abc2f3a74fe789
command:
- python
- -m
64 changes: 64 additions & 0 deletions components/release.sh
@@ -0,0 +1,64 @@
#!/bin/bash
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This script automates the process of releasing the component images.
# To run it, find a good release-candidate commit SHA in the ml-pipeline-staging project,
# and pass the full GitHub commit SHA to the script, e.g.
# ./release.sh 2118baf752d3d30a8e43141165e13573b20d85b8
# The script copies the images from staging to prod and updates the local code.
# You can then send a PR from your local branch.

set -xe

images=(
"ml-pipeline-dataflow-tf-predict"
"ml-pipeline-dataflow-tfdv"
"ml-pipeline-dataflow-tft"
"ml-pipeline-dataflow-tfma"
"ml-pipeline-kubeflow-deployer"
"ml-pipeline-kubeflow-tf-trainer"
"ml-pipeline-kubeflow-tf"
"ml-pipeline-dataproc-analyze"
"ml-pipeline-dataproc-create-cluster"
"ml-pipeline-dataproc-delete-cluster"
"ml-pipeline-dataproc-predict"
"ml-pipeline-dataproc-transform"
"ml-pipeline-dataproc-train"
"resnet-deploy"
"resnet-preprocess"
"resnet-train"
"ml-pipeline-local-confusion-matrix"
"ml-pipeline-local-roc"
)

COMMIT_SHA=$1
FROM_GCR_PREFIX='gcr.io/ml-pipeline-staging/'
TO_GCR_PREFIX='gcr.io/ml-pipeline/'
PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )

for image in "${images[@]}"
do
TARGET_IMAGE_BASE=${TO_GCR_PREFIX}${image}
TARGET_IMAGE=${TARGET_IMAGE_BASE}:${COMMIT_SHA}

# Move image from staging to prod GCR
gcloud container images add-tag --quiet \
${FROM_GCR_PREFIX}${image}:${COMMIT_SHA} ${TARGET_IMAGE}

# Update the code
find "${PARENT_PATH}/../samples" -type f | while read file; do sed -i -e "s|${TARGET_IMAGE_BASE}:\([a-zA-Z0-9_.-]\)\+|${TARGET_IMAGE}|g" "$file"; done
find "${PARENT_PATH}" -type f | while read file; do sed -i -e "s|${TARGET_IMAGE_BASE}:\([a-zA-Z0-9_.-]\)\+|${TARGET_IMAGE}|g" "$file"; done
done
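
The sed expression above replaces whatever tag a component image currently carries in the checked-in files with the new commit SHA. A minimal illustration for a single file, reusing the same expression (the file name here is illustrative; the before/after tags match the train.template.yaml change in this commit):

# Before (line in a checked-in template):
#   image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.4
# After ./release.sh 85c6413a2e13da4b8f198aeac1abc2f3a74fe789, the same line reads:
#   image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:85c6413a2e13da4b8f198aeac1abc2f3a74fe789
sed -i -e "s|gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:\([a-zA-Z0-9_.-]\)\+|gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:85c6413a2e13da4b8f198aeac1abc2f3a74fe789|g" train.template.yaml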