build: add release automation
parthea committed Oct 23, 2024
1 parent ec326d5 commit aae0830
Showing 11 changed files with 1,221 additions and 0 deletions.
2 changes: 2 additions & 0 deletions .github/release-please.yml
@@ -0,0 +1,2 @@
releaseType: python
handleGHRelease: true
1 change: 1 addition & 0 deletions .github/release-trigger.yml
@@ -0,0 +1 @@
enabled: true
43 changes: 43 additions & 0 deletions .kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
#!/bin/bash
# Copyright 2024 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -eo pipefail

function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
function msg { println "$*" >&2 ;}
function println { printf '%s\n' "$(now) $*" ;}


# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
mkdir -p ${SECRET_LOCATION}
for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
do
msg "Retrieving secret ${key}"
docker run --entrypoint=gcloud \
--volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
gcr.io/google.com/cloudsdktool/cloud-sdk \
secrets versions access latest \
--project cloud-devrel-kokoro-resources \
--secret ${key} > \
"${SECRET_LOCATION}/${key}"
if [[ $? == 0 ]]; then
msg "Secret written to ${SECRET_LOCATION}/${key}"
else
msg "Error retrieving secret ${key}"
fi
done
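
For context, the loop above is driven entirely by the comma-separated SECRET_MANAGER_KEYS variable (set for this release job in .kokoro/release/common.cfg below), and each secret is written to a file named after its key under ${KOKORO_GFILE_DIR}/secret_manager. A minimal sketch of how a later build step could read one of those files back, assuming one of the keys configured below:

# Illustrative only: consume a secret that populate-secrets.sh wrote to disk.
# The key name matches an entry in SECRET_MANAGER_KEYS (see common.cfg below).
SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
REPORTER_APP_TOKEN="$(cat "${SECRET_LOCATION}/releasetool-publish-reporter-app")"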
29 changes: 29 additions & 0 deletions .kokoro/release.sh
@@ -0,0 +1,29 @@
#!/bin/bash
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -eo pipefail

# Start the releasetool reporter
python3 -m pip install --require-hashes -r github/dataproc-spark-connect-python/.kokoro/requirements.txt
python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script

# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1

# Move into the package, build the distribution and upload.
TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-dataproc-spark-connect-1")
cd github/dataproc-spark-connect-python
python3 setup.py sdist bdist_wheel
twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
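
As a side note, the build-and-upload step can be rehearsed locally before the Kokoro job runs it; a rough sketch using the same commands the script relies on (not part of the committed job, and it stops short of uploading):

# Illustrative local dry run: build the sdist/wheel into dist/ and validate
# the package metadata without publishing anything.
python3 -m pip install --upgrade setuptools wheel twine
python3 setup.py sdist bdist_wheel
twine check dist/*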
49 changes: 49 additions & 0 deletions .kokoro/release/common.cfg
@@ -0,0 +1,49 @@
# Format: //devtools/kokoro/config/proto/build.proto

# Build logs will be here
action {
define_artifacts {
regex: "**/*sponge_log.xml"
}
}

# Download trampoline resources.
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"

# Use the trampoline script to run in docker.
build_file: "dataproc-spark-connect-python/.kokoro/trampoline.sh"

# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
}
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/dataproc-spark-connect-python/.kokoro/release.sh"
}

# Fetch PyPI password
before_action {
fetch_keystore {
keystore_resource {
keystore_config_id: 73713
keyname: "google-cloud-pypi-token-keystore-dataproc-spark-connect-1"
}
}
}

# Tokens needed to report release status back to GitHub
env_vars: {
key: "SECRET_MANAGER_KEYS"
value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
}

# Store the packages we uploaded to PyPI. That way, we have a record of exactly
# what we published, which we can use to generate SBOMs and attestations.
action {
define_artifacts {
regex: "github/dataproc-spark-connect-python/**/*.tar.gz"
strip_prefix: "github/dataproc-spark-connect-python"
}
}
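
Note how the fetch_keystore block above ties back to .kokoro/release.sh: the fetched PyPI token appears to land in KOKORO_KEYSTORE_DIR as a file whose name combines the keystore_config_id and keyname, which is exactly the path the release script reads:

# From .kokoro/release.sh above; the filename follows "<keystore_config_id>_<keyname>".
TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-dataproc-spark-connect-1")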
1 change: 1 addition & 0 deletions .kokoro/release/release.cfg
@@ -0,0 +1 @@
# Format: //devtools/kokoro/config/proto/build.proto
11 changes: 11 additions & 0 deletions .kokoro/requirements.in
@@ -0,0 +1,11 @@
gcp-docuploader
gcp-releasetool>=2 # required for compatibility with cryptography>=42.x
importlib-metadata
typing-extensions
twine
wheel
setuptools
nox>=2022.11.21 # required to remove dependency on py
charset-normalizer<3
click<8.1.0
cryptography>=42.0.5
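
Because .kokoro/release.sh installs the pinned .kokoro/requirements.txt with --require-hashes, a hash-locked file has to be generated from this requirements.in; a sketch of how such a file is typically produced with pip-tools (assuming pip-compile is the tool used here):

# Illustrative: regenerate the hash-pinned requirements file from requirements.in.
python3 -m pip install pip-tools
pip-compile --generate-hashes --output-file=.kokoro/requirements.txt .kokoro/requirements.in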
(The remaining changed files in this commit were not rendered in this view.)
